summaryrefslogtreecommitdiffstats
path: root/cyborg_enhancement/mitaka_version/cyborg/cyborg
diff options
context:
space:
mode:
Diffstat (limited to 'cyborg_enhancement/mitaka_version/cyborg/cyborg')
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/__init__.py22
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/__init__.py19
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/accelerator.py36
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/common/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/common/exception.py123
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/configuration.py157
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/base.py79
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/__init__.py41
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/base.py51
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/driver.py56
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/sysinfo.py162
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/utils.py36
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/generic_driver.py80
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/__init__.py28
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/base.py56
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/__init__.py1
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/driver.py41
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/sysinfo.py30
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/utils.py67
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/generic.py81
-rwxr-xr-xcyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/netronome.py73
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/spdk.py147
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/nvmf/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/nvmf/nvmf.py113
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/spdk.py75
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/common_fun.py206
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/nvmf_client.py119
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/py_spdk.py82
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/vhost_client.py121
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/vhost/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/vhost/vhost.py92
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/__init__.py0
-rwxr-xr-xcyborg_enhancement/mitaka_version/cyborg/cyborg/agent/manager.py71
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/resource_tracker.py206
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/rpcapi.py52
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/app.py66
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/config.py40
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/base.py33
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/link.py48
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/root.py72
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/__init__.py73
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/accelerators.py233
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/deployables.py210
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/ports.py269
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/types.py161
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/utils.py35
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/expose.py40
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/hooks.py112
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/__init__.py24
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/auth_token.py64
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/parsable_error.py72
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/__init__.py19
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/agent.py37
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/api.py36
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/conductor.py39
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/dbsync.py91
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/common/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/common/config.py29
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/common/constants.py18
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/common/exception.py202
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/common/i18n.py22
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/common/paths.py48
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/common/policy.py265
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/common/rpc.py123
-rwxr-xr-xcyborg_enhancement/mitaka_version/cyborg/cyborg/common/service.py145
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/common/utils.py41
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/handlers.py40
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/manager.py180
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/rpcapi.py192
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/__init__.py29
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/api.py58
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/database.py32
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/default.py69
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/placement.py66
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/api.py134
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/migration.py52
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic.ini54
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/README12
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/env.py61
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/script.py.mako18
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/versions/e410080397351_create_port_table.py55
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/versions/f50980397351_initial_migration.py101
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/api.py513
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/migration.py108
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/models.py132
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/__init__.py30
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/accelerator.py84
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/attribute.py84
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/base.py176
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/deployable.py139
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/fields.py30
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/physical_function.py137
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/port.py91
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/virtual_function.py61
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/services/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/services/report.py165
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/base.py169
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/functional/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/__init__.py38
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/base.py105
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/driver.py93
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/prepare_test_data.py295
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/test_generic.py66
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/test_nvmf.py131
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/test_vhost.py144
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/test_resource_tracker.py91
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/base.py214
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/base.py21
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/test_accelerators.py174
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/base.py71
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/utils.py31
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_accelerator.py66
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_deployable.py70
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_physical_function.py72
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_virtual_function.py72
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_accelerator.py104
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_deployable.py151
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_object.py226
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_physical_function.py186
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_virtual_function.py202
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/utils.py41
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/policy_fixture.py44
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/__init__.py0
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/test_placement_client.py123
-rw-r--r--cyborg_enhancement/mitaka_version/cyborg/cyborg/version.py19
151 files changed, 11112 insertions, 0 deletions
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/__init__.py
new file mode 100644
index 0000000..3056acf
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import eventlet
+import pbr.version
+
+
+__version__ = pbr.version.VersionInfo(
+ 'cyborg').version_string()
+
+eventlet.monkey_patch(os=False)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/__init__.py
new file mode 100644
index 0000000..d0f34f0
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/__init__.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pbr.version
+
+
+__version__ = pbr.version.VersionInfo(
+ 'cyborg').version_string()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/accelerator.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/accelerator.py
new file mode 100644
index 0000000..18cc4e9
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/accelerator.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2016-2017 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from sqlalchemy import Column, Integer, String
+from sqlalchemy.ext.declarative import declarative_base
+Base = declarative_base()
+
+
+# A common internal accelerator object for internal use.
+class Accelerator(Base):
+ __tablename__ = 'accelerators'
+ accelerator_id = Column(String, primary_key=True)
+ device_type = Column(String)
+ remoteable = Column(Integer)
+ vendor_id = Column(String)
+ product_id = Column(String)
+
+ def __init__(self, **kwargs):
+ self.accelerator_id = kwargs['accelerator_id']
+ self.device_type = kwargs['device_type']
+ self.remoteable = kwargs['remoteable']
+ self.vendor_id = kwargs['vendor_id']
+ self.product_id = kwargs['product_id']
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/common/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/common/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/common/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/common/exception.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/common/exception.py
new file mode 100644
index 0000000..5999e02
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/common/exception.py
@@ -0,0 +1,123 @@
+# Copyright 2017 Lenovo, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Accelerator base exception handling. """
+
+import collections
+import json
+from oslo_log import log as logging
+import six
+from six.moves import http_client
+from cyborg.common.i18n import _
+
+
+LOG = logging.getLogger(__name__)
+
+
+def _ensure_exception_kwargs_serializable(exc_class_name, kwargs):
+ """Ensure that kwargs are serializable
+
+ Ensure that all kwargs passed to exception constructor can be passed over
+ RPC, by trying to convert them to JSON, or, as a last resort, to string.
+ If it is not possible, unserializable kwargs will be removed, letting the
+ receiver handle the exception string as it is configured to.
+
+ :param exc_class_name: an AcceleratorException class name.
+ :param kwargs: a dictionary of keyword arguments passed to the exception
+ constructor.
+ :returns: a dictionary of serializable keyword arguments.
+ """
+ serializers = [(json.dumps, _('when converting to JSON')),
+ (six.text_type, _('when converting to string'))]
+ exceptions = collections.defaultdict(list)
+ serializable_kwargs = {}
+ for k, v in kwargs.items():
+ for serializer, msg in serializers:
+ try:
+ serializable_kwargs[k] = serializer(v)
+ exceptions.pop(k, None)
+ break
+ except Exception as e:
+ exceptions[k].append(
+ '(%(serializer_type)s) %(e_type)s: %(e_contents)s' %
+ {'serializer_type': msg, 'e_contents': e,
+ 'e_type': e.__class__.__name__})
+ if exceptions:
+ LOG.error("One or more arguments passed to the %(exc_class)s "
+ "constructor as kwargs can not be serialized. The "
+ "serialized arguments: %(serialized)s. These "
+ "unserialized kwargs were dropped because of the "
+ "exceptions encountered during their "
+ "serialization:\n%(errors)s",
+ dict(errors=';\n'.join("%s: %s" % (k, '; '.join(v))
+ for k, v in exceptions.items()),
+ exc_class=exc_class_name,
+ serialized=serializable_kwargs))
+ # We might be able to actually put the following keys' values into
+ # format string, but there is no guarantee, drop it just in case.
+ for k in exceptions:
+ del kwargs[k]
+ return serializable_kwargs
+
+
+class AcceleratorException(Exception):
+ """Base Accelerator Exception
+
+ To correctly use this class, inherit from it and define
+ a '_msg_fmt' property. That message will get printf'd
+ with the keyword arguments provided to the constructor.
+
+ If you need to access the message from an exception you should use
+ six.text_type(exc)
+
+ """
+ _msg_fmt = _("An unknown exception occurred.")
+ code = http_client.INTERNAL_SERVER_ERROR
+ headers = {}
+ safe = False
+
+ def __init__(self, message=None, **kwargs):
+
+ self.kwargs = _ensure_exception_kwargs_serializable(
+ self.__class__.__name__, kwargs)
+
+ if 'code' not in self.kwargs:
+ try:
+ self.kwargs['code'] = self.code
+ except AttributeError:
+ pass
+
+ if not message:
+ if kwargs:
+ message = self._msg_fmt % kwargs
+ else:
+ message = self._msg_fmt
+
+ super(AcceleratorException, self).__init__(message)
+
+
+class Invalid(AcceleratorException):
+ _msg_fmt = _("Unacceptable parameters.")
+
+
+class InvalidParameterValue(Invalid):
+ _msg_fmt = "%(err)s"
+
+
+class MissingParameterValue(InvalidParameterValue):
+ _msg_fmt = "%(err)s"
+
+
+class InvalidAccelerator(InvalidParameterValue):
+ _msg_fmt = '%(err)s' \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/configuration.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/configuration.py
new file mode 100644
index 0000000..b1c172a
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/configuration.py
@@ -0,0 +1,157 @@
+# Copyright (c) 2012 Rackspace Hosting
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Configuration support for all drivers. from openstack/cyborg"""
+
+from oslo_config import cfg
+
+CONF = cfg.CONF
+SHARED_CONF_GROUP = 'backend_defaults'
+
+
+class DefaultGroupConfiguration(object):
+ """Get config options from only DEFAULT."""
+
+ def __init__(self):
+ # set the local conf so that __call__'s know what to use
+ self.local_conf = CONF
+
+ def _ensure_config_values(self, accelerator_opts):
+ CONF.register_opts(accelerator_opts, group=None)
+
+ def append_config_values(self, accelerator_opts):
+ self._ensure_config_values(accelerator_opts)
+
+ def safe_get(self, value):
+ """get default group value from CONF
+ :param value: value.
+ :return: get default group value from CONF.
+ """
+ try:
+ return self.__getattr__(value)
+ except cfg.NoSuchOptError:
+ return None
+
+ def __getattr__(self, value):
+ """Don't use self.local_conf to avoid reentrant call to __getattr__()
+ :param value: value.
+ :return: getattr(local_conf, value).
+ """
+ local_conf = object.__getattribute__(self, 'local_conf')
+ return getattr(local_conf, value)
+
+
+class BackendGroupConfiguration(object):
+ def __init__(self, accelerator_opts, config_group=None):
+ """Initialize configuration.
+ This takes care of grafting the implementation's config
+ values into the config group and shared defaults. We will try to
+ pull values from the specified 'config_group', but fall back to
+ defaults from the SHARED_CONF_GROUP.
+ """
+ self.config_group = config_group
+
+ # set the local conf so that __call__'s know what to use
+ self._ensure_config_values(accelerator_opts)
+ self.backend_conf = CONF._get(self.config_group)
+ self.shared_backend_conf = CONF._get(SHARED_CONF_GROUP)
+
+ def _safe_register(self, opt, group):
+ try:
+ CONF.register_opt(opt, group=group)
+ except cfg.DuplicateOptError:
+ pass # If it's already registered ignore it
+
+ def _ensure_config_values(self, accelerator_opts):
+ """Register the options in the shared group.
+ When we go to get a config option we will try the backend specific
+ group first and fall back to the shared group. We override the default
+ from all the config options for the backend group so we can know if it
+ was set or not.
+ """
+ for opt in accelerator_opts:
+ self._safe_register(opt, SHARED_CONF_GROUP)
+ # Assuming they aren't the same groups, graft on the options into
+ # the backend group and override its default value.
+ if self.config_group != SHARED_CONF_GROUP:
+ self._safe_register(opt, self.config_group)
+ CONF.set_default(opt.name, None, group=self.config_group)
+
+ def append_config_values(self, accelerator_opts):
+ self._ensure_config_values(accelerator_opts)
+
+ def set_default(self, opt_name, default):
+ CONF.set_default(opt_name, default, group=SHARED_CONF_GROUP)
+
+ def get(self, key, default=None):
+ return getattr(self, key, default)
+
+ def safe_get(self, value):
+ """get config_group value from CONF
+ :param value: value.
+ :return: get config_group value from CONF.
+ """
+
+ try:
+ return self.__getattr__(value)
+ except cfg.NoSuchOptError:
+ return None
+
+ def __getattr__(self, opt_name):
+ """Don't use self.X to avoid reentrant call to __getattr__()
+ :param opt_name: opt_name.
+ :return: opt_value.
+ """
+ backend_conf = object.__getattribute__(self, 'backend_conf')
+ opt_value = getattr(backend_conf, opt_name)
+ if opt_value is None:
+ shared_conf = object.__getattribute__(self, 'shared_backend_conf')
+ opt_value = getattr(shared_conf, opt_name)
+ return opt_value
+
+
+class Configuration(object):
+ def __init__(self, accelerator_opts, config_group=None):
+ """Initialize configuration.
+ This shim will allow for compatibility with the DEFAULT
+ style of backend configuration which is used by some of the users
+ of this configuration helper, or by the accelerator drivers that have
+ all been forced over to the config_group style.
+ """
+ self.config_group = config_group
+ if config_group:
+ self.conf = BackendGroupConfiguration(accelerator_opts,
+ config_group)
+ else:
+ self.conf = DefaultGroupConfiguration()
+
+ def append_config_values(self, accelerator_opts):
+ self.conf.append_config_values(accelerator_opts)
+
+ def safe_get(self, value):
+ """get value from CONF
+ :param value: value.
+ :return: get value from CONF.
+ """
+
+ return self.conf.safe_get(value)
+
+ def __getattr__(self, value):
+ """Don't use self.conf to avoid reentrant call to __getattr__()
+ :param value: value.
+ :return: getattr(conf, value).
+ """
+ conf = object.__getattribute__(self, 'conf')
+ return getattr(conf, value) \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/base.py
new file mode 100644
index 0000000..2706022
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/base.py
@@ -0,0 +1,79 @@
+# Copyright 2017 Lenovo, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Abstract base classes for drivers.
+"""
+
+import abc
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class BaseDriver(object):
+ """Base class for all drivers.
+
+ Defines the abstract base class for generic and vendor drivers.
+ """
+
+ standard_interfaces = ('discover', 'list', 'update', 'attach', 'detach')
+
+ discover = None
+ """`Standard` attribute for discovering drivers.
+
+ A reference to an instance of :class:DiscoverInterface.
+ """
+
+ list = None
+ """`Core` attribute for listing drivers.
+
+ A reference to an instance of :class:ListInterface.
+ """
+
+ update = None
+ """`Standard` attribute to update drivers.
+
+ A reference to an instance of :class:UpdateInterface.
+ """
+
+ attach = None
+ """`Standard` attribute to attach accelerator to an instance.
+
+ A reference to an instance of :class:AttachInterface.
+ """
+
+ detach = None
+ """`Standard` attribute to detach accelerator from an instance.
+
+ A reference to an instance of :class:DetachInterface.
+ """
+
+ def __init__(self):
+ pass
+
+ @property
+ def all_interfaces(self):
+ return (list(self.standard_interfaces))
+
+ def get_properties(self):
+ """Gets the properties of the driver.
+
+ :returns: dictionary of <property name>:<property description> entries.
+ """
+
+ properties = {}
+ for iface_name in self.all_interfaces:
+ iface = getattr(self, iface_name, None)
+ if iface:
+ properties.update(iface.get_properties())
+ return properties
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/__init__.py
new file mode 100644
index 0000000..d6eaff6
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/__init__.py
@@ -0,0 +1,41 @@
+# Copyright 2018 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import glob
+
+from oslo_log import log as logging
+
+
+__import__('pkg_resources').declare_namespace(__name__)
+__import__(".".join([__package__, 'base']))
+
+
+LOG = logging.getLogger(__name__)
+
+
+def load_fpga_vendor_driver():
+ files = glob.glob(os.path.join(os.path.dirname(__file__), "*/driver*"))
+ modules = set(map(lambda s: ".".join(s.rsplit(".")[0].rsplit("/", 2)[-2:]),
+ files))
+ for m in modules:
+ try:
+ __import__(".".join([__package__, m]))
+ LOG.debug("Successfully loaded FPGA vendor driver: %s." % m)
+ except ImportError as e:
+ LOG.error("Failed to load FPGA vendor driver: %s. Details: %s"
+ % (m, e))
+
+
+load_fpga_vendor_driver() \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/base.py
new file mode 100644
index 0000000..4da1d9d
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/base.py
@@ -0,0 +1,51 @@
+# Copyright 2018 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Cyborg FPGA driver implementation.
+"""
+
+from cyborg.accelerator.drivers.fpga import utils
+
+
# PCI vendor id -> vendor name accepted by vendor driver classes.
VENDOR_MAPS = {"0x8086": "intel"}


class FPGADriver(object):
    """Base class for FPGA drivers.

    This is just a virtual FPGA drivers interface.
    Vendors should implement their specific drivers.
    """

    @classmethod
    def create(cls, vendor, *args, **kwargs):
        """Instantiate the vendor-specific FPGA driver.

        :param vendor: PCI vendor id (e.g. "0x8086") or vendor name.
        :returns: an instance of the matching FPGADriver subclass.
        :raises LookupError: when no subclass claims the vendor.
        """
        # Normalize once: the mapping is loop-invariant, the original
        # recomputed it for every registered subclass.
        vendor = VENDOR_MAPS.get(vendor, vendor)
        for sclass in cls.__subclasses__():
            if vendor == sclass.VENDOR:
                return sclass(*args, **kwargs)
        raise LookupError("Not find the FPGA driver for vendor %s" % vendor)

    def __init__(self, *args, **kwargs):
        pass

    def discover(self):
        """Enumerate FPGA devices. Vendor drivers must override."""
        raise NotImplementedError()

    def program(self, device_path, image):
        """Program *image* onto a device. Vendor drivers must override."""
        raise NotImplementedError()

    @classmethod
    def discover_vendors(cls):
        """Return the FPGA vendors present on this host (via sysfs)."""
        return utils.discover_vendors()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/driver.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/driver.py
new file mode 100644
index 0000000..d04f7d9
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/driver.py
@@ -0,0 +1,56 @@
+
+# Copyright 2018 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Cyborg Intel FPGA driver implementation.
+"""
+
+import subprocess
+
+from cyborg.accelerator.drivers.fpga.base import FPGADriver
+from cyborg.accelerator.drivers.fpga.intel import sysinfo
+
+
class IntelFPGADriver(FPGADriver):
    """Intel FPGA driver.

    Discovery is done through sysfs; programming goes through the
    ``fpgaconf`` command-line tool.
    """
    VENDOR = "intel"

    def __init__(self, *args, **kwargs):
        pass

    def discover(self):
        """Return the FPGA device tree reported by sysfs."""
        return sysinfo.fpga_tree()

    def program(self, device_path, image):
        """Program *image* onto the PF that owns *device_path*.

        A VF path or BDF is first resolved to its parent PF, since
        programming happens through the physical function.

        :param device_path: sysfs path or PCI BDF of the target device.
        :param image: bitstream image file to program.
        :returns: ``fpgaconf`` exit code (0 on success).
        """
        path = sysinfo.find_pf_by_vf(device_path) if sysinfo.is_vf(
            device_path) else device_path
        if sysinfo.is_bdf(device_path):
            bdf = sysinfo.get_pf_bdf(device_path)
        else:
            bdf = sysinfo.get_bdf_by_path(path)
        bdfs = sysinfo.split_bdf(bdf)
        cmd = ["sudo", "fpgaconf"]
        for i in zip(["-b", "-d", "-f"], bdfs):
            cmd.extend(i)
        cmd.append(image)
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        # Bug fix: communicate() drains stdout before waiting; the
        # original p.wait() with stdout=PIPE can deadlock once the pipe
        # buffer fills. TODO(review): log the captured output.
        p.communicate()
        return p.returncode
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/sysinfo.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/sysinfo.py
new file mode 100644
index 0000000..64cfc13
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/intel/sysinfo.py
@@ -0,0 +1,162 @@
+# Copyright 2018 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Cyborg Intel FPGA driver implementation.
+"""
+
+# from cyborg.accelerator.drivers.fpga.base import FPGADriver
+
+import glob
+import os
+import re
+
+
# sysfs layout constants for Intel FPGA discovery.
SYS_FPGA = "/sys/class/fpga"
DEVICE = "device"
PF = "physfn"
VF = "virtfn*"
# Full PCI address: dddd:bb:dd.f (hex fields).
BDF_PATTERN = re.compile(
    r"^[a-fA-F\d]{4}:[a-fA-F\d]{2}:[a-fA-F\d]{2}\.[a-fA-F\d]$")


# sysfs attribute file name -> reported property name.
DEVICE_FILE_MAP = {"vendor": "vendor_id",
                   "device": "product_id",
                   "sriov_numvfs": "pr_num"}
# Optional per-property parser callables (none registered yet).
DEVICE_FILE_HANDLER = {}
# Attribute files exposed in discovery results.
DEVICE_EXPOSED = ["vendor", "device", "sriov_numvfs"]


def all_fpgas():
    """Return the sysfs paths of every FPGA device on this host."""
    pattern = os.path.join(SYS_FPGA, "*")
    return glob.glob(pattern)


def all_vf_fpgas():
    """Return FPGA devices that are SR-IOV virtual functions."""
    pattern = os.path.join(SYS_FPGA, "*/device/physfn")
    return [link.rsplit("/", 2)[0] for link in glob.glob(pattern)]


def all_pure_pf_fpgas():
    """Return physical functions that carry at least one VF."""
    pattern = os.path.join(SYS_FPGA, "*/device/virtfn0")
    return [link.rsplit("/", 2)[0] for link in glob.glob(pattern)]


def target_symbolic_map():
    """Map resolved ``device`` symlink targets to FPGA sysfs dirs."""
    mapping = {}
    for link in glob.glob(os.path.join(SYS_FPGA, "*/device")):
        mapping[os.path.realpath(link)] = os.path.dirname(link)
    return mapping


def bdf_path_map():
    """Map PCI BDF strings to their FPGA sysfs directories."""
    return {os.path.basename(os.path.realpath(link)): os.path.dirname(link)
            for link in glob.glob(os.path.join(SYS_FPGA, "*/device"))}


def all_vfs_in_pf_fpgas(pf_path):
    """Return the sysfs dirs of every VF under the PF at *pf_path*."""
    lookup = target_symbolic_map()
    links = glob.glob(os.path.join(pf_path, "device/virtfn*"))
    return [lookup[os.path.realpath(link)] for link in links]


def all_pf_fpgas():
    """Return FPGA devices whose PCI function supports SR-IOV (PFs)."""
    pattern = os.path.join(SYS_FPGA, "*/device/sriov_totalvfs")
    return [link.rsplit("/", 2)[0] for link in glob.glob(pattern)]
+
+
def is_vf(path):
    """True when the sysfs *path* belongs to an SR-IOV virtual function."""
    return bool(glob.glob(os.path.join(path, "device/physfn")))


def find_pf_by_vf(path):
    """Return the sysfs dir of the physical function owning VF *path*."""
    target = os.path.realpath(os.path.join(path, "device/physfn"))
    return target_symbolic_map()[target]


def is_bdf(bdf):
    """True when *bdf* is a full PCI address (dddd:bb:dd.f)."""
    return BDF_PATTERN.match(bdf) is not None


def get_bdf_by_path(path):
    """Return the PCI BDF of the device behind the sysfs *path*."""
    return os.path.basename(os.readlink(os.path.join(path, "device")))


def split_bdf(bdf):
    """Split a BDF into hex-prefixed [bus, device, function] strings."""
    fields = bdf.replace(".", ":").rsplit(":")[1:]
    return ["0x" + field for field in fields]


def get_pf_bdf(bdf):
    """Map a (possibly VF) BDF to the BDF of its physical function.

    A BDF with no known sysfs path is returned unchanged.
    """
    path = bdf_path_map().get(bdf)
    if not path:
        return bdf
    return get_bdf_by_path(find_pf_by_vf(path) if is_vf(path) else path)
+
+
def fpga_device(path):
    """Collect the exposed sysfs attributes of one FPGA device.

    Walks *path* and, for every attribute file listed in DEVICE_EXPOSED,
    stores its value under the name given by DEVICE_FILE_MAP (falling
    back to the file name). A callable registered in DEVICE_FILE_HANDLER
    for that name takes precedence over the default one-line read.

    :param path: sysfs device directory to scan.
    :returns: dict of property name -> value.
    """
    infos = {}

    def read_line(filename):
        with open(filename) as f:
            return f.readline().strip()

    # NOTE "In 3.x, os.path.walk is removed in favor of os.walk."
    for (dirpath, dirnames, filenames) in os.walk(path):
        for filename in filenames:
            if filename in DEVICE_EXPOSED:
                key = DEVICE_FILE_MAP.get(filename) or filename
                # Bug fix: handlers live in a dict and must be looked up
                # by subscript; the original called the dict itself
                # (DEVICE_FILE_HANDLER(key)), a guaranteed TypeError on
                # this path.
                handler = DEVICE_FILE_HANDLER.get(key)
                if callable(handler):
                    infos[key] = handler(os.path.join(dirpath, filename))
                else:
                    infos[key] = read_line(os.path.join(dirpath, filename))
    return infos
+
+
def fpga_tree():
    """Build the FPGA device tree: PFs at the top, their VFs as regions."""

    def gen_fpga_infos(path, vf=True):
        # Describe one device (PF or VF) from its sysfs directory.
        name = os.path.basename(path)
        dpath = os.path.realpath(os.path.join(path, DEVICE))
        func = "vf" if vf else "pf"
        pf_bdf = ""
        if vf:
            pf_bdf = os.path.basename(
                os.path.realpath(os.path.join(dpath, PF)))
        fpga = {"path": path, "function": func,
                "devices": os.path.basename(dpath), "assignable": True,
                "parent_devices": pf_bdf,
                "name": name}
        fpga.update(fpga_device(dpath))
        return fpga

    devs = []
    pure_pfs = all_pure_pf_fpgas()
    for pf in all_pf_fpgas():
        fpga = gen_fpga_infos(pf, False)
        if pf in pure_pfs:
            # A PF that carries VFs is not directly assignable; its VFs
            # are listed under "regions" instead.
            fpga["assignable"] = False
            fpga["regions"] = [gen_fpga_infos(vf, True)
                               for vf in all_vfs_in_pf_fpgas(pf)]
        devs.append(fpga)
    return devs
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/utils.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/utils.py
new file mode 100644
index 0000000..b090659
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/fpga/utils.py
@@ -0,0 +1,36 @@
+# Copyright 2018 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Utils for FPGA driver.
+"""
+
+import glob
+import re
+
+
VENDORS = ["intel"]  # can extend, such as ["intel", "xilinx"]

SYS_FPGA_PATH = "/sys/class/fpga"
# One alternation group per known vendor name.
VENDORS_PATTERN = re.compile("|".join(["(%s)" % v for v in VENDORS]))


def discover_vendors():
    """Return the distinct FPGA vendors found under sysfs."""
    found = set()
    for entry in glob.glob1(SYS_FPGA_PATH, "*"):
        match = VENDORS_PATTERN.match(entry)
        if match:
            found.add(match.group())
    return list(found)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/generic_driver.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/generic_driver.py
new file mode 100644
index 0000000..2e13ecc
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/generic_driver.py
@@ -0,0 +1,80 @@
+# Copyright 2017 Lenovo, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Cyborg Generic driver implementation.
+"""
+
+from modules import generic
+from oslo_config import cfg
+from oslo_log import log
+
+from cyborg.accelerator import accelerator
+from cyborg.conductor.rpcapi import ConductorAPI as conductor_api
+
+LOG = log.getLogger(__name__)
+
+
+CONF = cfg.CONF
+
+
class GenericDriver(generic.GENERICDRIVER):
    """Executes commands relating to Shares."""

    # NOTE(review): this class reads self.accelerator in every CRUD
    # method below but never assigns it (only self.acc is assigned) —
    # presumably a caller or subclass sets it; confirm before use.

    def __init__(self, *args, **kwargs):
        """Do initialization."""
        super(GenericDriver, self).__init__()
        # NOTE(review): append_config_values() is called with no
        # arguments and self.configuration is not created here —
        # presumably provided by the GENERICDRIVER base; verify.
        self.configuration.append_config_values()
        self._helpers = {}
        self.backend_name = self.configuration.safe_get(
            'accelerator_backend_name')

    def do_setup(self, context):
        """Any initialization the generic driver does while starting."""
        super(GenericDriver, self).do_setup(context)
        self.acc = accelerator.Accelerator()

    def create_accelerator(self, context):
        """Creates accelerator."""
        # Delegates persistence to the conductor over RPC.
        self.acc = conductor_api.accelerator_create(
            context=context, obj_acc=self.accelerator)
        LOG.debug("Created a new accelerator with the UUID %s ",
                  self.accelerator.accelerator_id)

    def get_accelerator(self, context):
        """Gets accelerator by UUID."""
        self.acc = conductor_api.accelerator_list_one(
            context=context, obj_acc=self.accelerator)
        return self.acc

    def list_accelerators(self, context):
        """Lists all accelerators."""
        self.acc = conductor_api.accelerator_list_all(
            context=context, obj_acc=self.accelerator)
        return self.acc

    def update_accelerator(self, context):
        """Updates accelerator with a patch update."""

        self.acc = conductor_api.accelerator_update(
            context=context, obj_acc=self.accelerator)
        LOG.debug("Updated accelerator %s ",
                  self.accelerator.accelerator_id)

    def delete_accelerator(self, context):
        """Deletes a specific accelerator."""
        LOG.debug("Deleting accelerator %s ", self.accelerator.accelerator_id)
        conductor_api.accelerator_delete(context=context,
                                         obj_acc=self.accelerator)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/__init__.py
new file mode 100644
index 0000000..a226d3f
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/__init__.py
@@ -0,0 +1,28 @@
+from cyborg.accelerator.drivers.gpu.nvidia.driver import NVIDIAGPUDriver
+import os
+import glob
+
+from oslo_log import log as logging
+
+
+__import__('pkg_resources').declare_namespace(__name__)
+__import__(".".join([__package__, 'base']))
+
+
+LOG = logging.getLogger(__name__)
+
+
def load_gpu_vendor_driver():
    """Import every vendor-specific GPU driver module in this package.

    Scans the package directory for ``<vendor>/driver*.py`` files and
    imports each one so the vendor classes register themselves as
    GPUDriver subclasses. An import failure is logged and skipped so a
    single broken vendor driver does not break the whole package.
    """
    files = glob.glob(os.path.join(os.path.dirname(__file__), "*/driver*"))
    modules = set(map(lambda s: ".".join(s.rsplit(".")[0].rsplit("/", 2)[-2:]),
                      files))
    for m in modules:
        try:
            __import__(".".join([__package__, m]))
            # Lazy %-style arguments: the message is only rendered when
            # the level is enabled (oslo.log convention).
            LOG.debug("Successfully loaded GPU vendor driver: %s.", m)
        except ImportError as e:
            LOG.error("Failed to load GPU vendor driver: %s. Details: %s",
                      m, e)


load_gpu_vendor_driver()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/base.py
new file mode 100644
index 0000000..80f31fc
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/base.py
@@ -0,0 +1,56 @@
+# Copyright 2018 Lenovo, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Cyborg GPU driver implementation.
+"""
+from oslo_log import log as logging
+
+from cyborg.accelerator.drivers.gpu import utils
+
+
+LOG = logging.getLogger(__name__)
+
# PCI vendor id -> vendor name claimed by vendor driver classes.
VENDOR_MAPS = {"10de": "nvidia", "102b": "matrox"}


class GPUDriver(object):
    """Base class for GPU drivers.

    This is just a virtual GPU drivers interface.
    Vendors should implement their specific drivers.
    """

    @classmethod
    def create(cls, vendor, *args, **kwargs):
        """Instantiate the GPU driver registered for a PCI vendor id.

        :param vendor: PCI vendor id, e.g. "10de".
        :returns: a driver instance, or None (with a warning logged)
            when no registered subclass matches.
        """
        # Resolve the id -> name mapping once; it is loop-invariant.
        vendor_name = VENDOR_MAPS.get(vendor)
        for sclass in cls.__subclasses__():
            if vendor_name == sclass.VENDOR:
                return sclass(*args, **kwargs)
        # Bug fix: LOG.warn is deprecated, use LOG.warning with lazy
        # %-style arguments.
        LOG.warning("Not find the GPU driver for vendor_id %s", vendor)

    def __init__(self, *args, **kwargs):
        pass

    def discover(self):
        """Enumerate GPU devices. Vendor drivers must override."""
        raise NotImplementedError()

    def program(self, device_path, image):
        """Program *image* onto a device. Vendor drivers must override."""
        raise NotImplementedError()

    @classmethod
    def discover_vendors(cls):
        """Return the GPU vendor ids present on this host (via lspci)."""
        return utils.discover_vendors()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/__init__.py
new file mode 100644
index 0000000..4e5d499
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/__init__.py
@@ -0,0 +1 @@
+__author__ = 'wangzh21'
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/driver.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/driver.py
new file mode 100644
index 0000000..c225555
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/driver.py
@@ -0,0 +1,41 @@
+# Copyright 2018 Lenovo, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Cyborg Intel GPU driver implementation.
+"""
+
+import subprocess
+
+from cyborg.accelerator.drivers.gpu.base import GPUDriver
+from cyborg.accelerator.drivers.gpu.nvidia import sysinfo
+
+
class NVIDIAGPUDriver(GPUDriver):
    """NVIDIA GPU driver.

    Discovery is delegated to the lspci-based sysinfo helper; GPUs are
    not programmable, so program() is a no-op.

    NOTE(review): the original docstring said "Base class for GPU
    drivers" — this is the NVIDIA vendor implementation.
    """
    VENDOR = "nvidia"

    def __init__(self, *args, **kwargs):
        pass

    def discover(self):
        # Enumerate NVIDIA GPUs found by sysinfo.
        return sysinfo.gpu_tree()

    def program(self, device_path, image):
        # Intentionally a no-op: there is no bitstream to program.
        pass
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/sysinfo.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/sysinfo.py
new file mode 100644
index 0000000..cab4c32
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/nvidia/sysinfo.py
@@ -0,0 +1,30 @@
+# Copyright 2018 Lenovo, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Cyborg Intel GPU driver implementation.
+"""
+
+from cyborg.accelerator.drivers.gpu import utils
# NVIDIA's PCI vendor id ("VENDER" spelling kept: it is referenced as-is).
VENDER_ID = "10de"


def all_gpus():
    """Placeholder; enumeration happens through gpu_tree()."""
    pass


def gpu_tree():
    """Return the NVIDIA GPUs discovered on this host as a device list."""
    return utils.discover_gpus(VENDER_ID)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/utils.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/utils.py
new file mode 100644
index 0000000..97c7909
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/gpu/utils.py
@@ -0,0 +1,67 @@
+# Copyright 2018 Lenovo, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Utils for GPU driver.
+"""
+
+import re
+import subprocess
+
+
# lspci class strings that identify a GPU.
GPU_FLAGS = ["VGA compatible controller", "3D controller"]
# Parses "lspci -nnn" lines, capturing the short BDF, device name, and
# the [vendor:product] id pair.
GPU_INFO_PATTERN = re.compile("(?P<devices>[0-9]{2}:[0-9]{2}\.[0-9]) (?P"
                              "<name>.*) \[.* [\[](?P<vendor_id>[0-9a-fA-F]{4})"
                              ":(?P<product_id>[0-9a-fA-F]{4})] .*")


def discover_vendors():
    """Return the set of PCI vendor ids of GPUs on this host.

    Shells out to ``lspci``; returns an empty set when no GPU line
    matches or the command is unavailable.
    """
    # GPU_FLAGS is a fixed module constant, so interpolating it into
    # the shell pipeline is safe.
    cmd = "sudo lspci -nnn | grep -E '%s'"
    cmd = cmd % "|".join(GPU_FLAGS)
    # Bug fix: universal_newlines makes stdout text on Python 3; the
    # original matched a str pattern against bytes lines (TypeError).
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True,
                         universal_newlines=True)
    p.wait()
    gpus = p.stdout.readlines()
    vendors = set()
    for gpu in gpus:
        m = GPU_INFO_PATTERN.match(gpu)
        if m:
            vendor_id = m.groupdict().get("vendor_id")
            vendors.add(vendor_id)
    return vendors
+
+
def discover_gpus(vender_id=None):
    """List GPUs via lspci, optionally filtered by PCI vendor id.

    :param vender_id: (sic, name kept for compatibility) PCI vendor id
        such as "10de"; None lists all GPUs.
    :returns: list of dicts with devices/name/vendor_id/product_id plus
        "function" and "assignable" flags.
    """
    cmd = "sudo lspci -nnn | grep -E '%s'"
    cmd = cmd % "|".join(GPU_FLAGS)
    if vender_id:
        cmd = cmd + "| grep " + vender_id
    # Bug fix: universal_newlines decodes stdout to text so the str
    # regex can match on Python 3 (the original got bytes -> TypeError).
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True,
                         universal_newlines=True)
    p.wait()
    gpu_list = []
    for gpu in p.stdout.readlines():
        m = GPU_INFO_PATTERN.match(gpu)
        if m:
            gpu_dict = m.groupdict()
            gpu_dict["function"] = "GPU"
            gpu_dict["devices"] = _match_nova_addr(gpu_dict["devices"])
            gpu_dict["assignable"] = True
            gpu_list.append(gpu_dict)
    return gpu_list
+
+
+def _match_nova_addr(devices):
+ addr = '0000:'+devices.replace(".", ":")
+ return addr
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/generic.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/generic.py
new file mode 100644
index 0000000..cc284a7
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/generic.py
@@ -0,0 +1,81 @@
+# Copyright 2017 Lenovo, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""
+Cyborg Generic driver modules implementation.
+"""
+
+from cyborg.accelerator.common import exception
+from cyborg.accelerator.drivers import base
+from oslo_log import log as logging
+
+LOG = logging.getLogger(__name__)
+
+# NOTE (crushil): REQUIRED_PROPERTIES needs to be filled out.
+REQUIRED_PROPERTIES = {'create', 'get', 'list', 'update', 'delete'}
+COMMON_PROPERTIES = REQUIRED_PROPERTIES
+
+
+def _check_for_missing_params(info_dict, error_msg, param_prefix=''):
+ missing_info = []
+ for label, value in info_dict.items():
+ if not value:
+ missing_info.append(param_prefix + label)
+
+ if missing_info:
+ exc_msg = _("%(error_msg)s. Missing are: %(missing_info)s")
+ raise exception.MissingParameterValue(
+ exc_msg % {'error_msg': error_msg, 'missing_info': missing_info})
+
+
def _parse_driver_info(driver):
    """Extract and validate the generic-driver parameters.

    :param driver: object exposing a ``driver_info`` mapping.
    :returns: dict holding the COMMON_PROPERTIES keys of driver_info.
    :raises exception.MissingParameterValue: when any value is absent.
    """
    info = driver.driver_info
    d_info = {k: info.get(k) for k in COMMON_PROPERTIES}
    # Bug fix: dropped the _() wrapper — the i18n marker is not
    # imported in this module and raised NameError at runtime.
    error_msg = ("Cannot validate Generic Driver. Some parameters were"
                 " missing in the configuration file.")
    _check_for_missing_params(d_info, error_msg)
    return d_info
+
+
class GENERICDRIVER(base.BaseDriver):
    """Generic accelerator driver skeleton.

    Only get_properties() carries real behavior; the lifecycle hooks
    are no-op placeholders for concrete drivers to override.
    """

    def get_properties(self):
        """Return the properties of the generic driver.

        :returns: dictionary of <property name>:<property description> entries.
        """
        return COMMON_PROPERTIES

    def attach(self, accelerator, instance):
        # Bug fix: in the original, install() and uninstall() were
        # accidentally nested inside attach()/detach() as dead local
        # defs. They are now proper class methods; attach/detach keep
        # their observable no-op behavior (return None, no effects).
        pass

    def install(self, accelerator):
        pass

    def detach(self, accelerator, instance):
        pass

    def uninstall(self, accelerator):
        pass

    def delete(self):
        pass

    def discover(self):
        pass

    def list(self):
        pass

    def update(self, accelerator, **kwargs):
        pass
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/netronome.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/netronome.py
new file mode 100755
index 0000000..6b15d34
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/netronome.py
@@ -0,0 +1,73 @@
+"""
+Cyborg Netronome driver modules implementation.
+"""
+import os
+import json
+import socket
+
+
+
+from cyborg.accelerator.drivers.modules import generic
+
+from oslo_log import log as logging
+
+LOG = logging.getLogger(__name__)
+
class NETRONOMEDRIVER(generic.GENERICDRIVER):
    """Netronome SmartNIC port driver.

    Port inventory comes from a static JSON config file; the PCI slot
    of each port is read through ethtool.
    """

    def __init__(self, *args, **kwargs):
        super(NETRONOMEDRIVER, self).__init__(*args, **kwargs)
        # Fixed netdev naming scheme: sdn_v0.0 .. sdn_v0.59.
        self.port_name_prefix = 'sdn_v0.'
        self.port_index_max = 59

    def get_available_resource(self):
        """Return the configured ports, tagged with this host's name."""
        port_resource = self._read_config()
        if port_resource:
            for port in port_resource:
                port["computer_node"] = socket.gethostname()
            LOG.info('Discover netronome port %s ', port_resource)
        return port_resource

    def _ovs_port_check(self, port_name):
        """True when *port_name* appears in self.bridge_port_list."""
        # NOTE(review): bridge_port_list is never assigned in this
        # class — presumably set elsewhere; confirm before relying on
        # this method.
        for port in self.bridge_port_list:
            if port_name == port.strip():
                return True
        return False

    def _read_config(self):
        '''read tag_config_path tags config file
        and return direction format variables'''
        self.tag_config_path = '/etc/cyborg/netronome_ports.json'
        if not os.path.exists(self.tag_config_path):
            LOG.error('There is no %s', self.tag_config_path)
            return None
        try:
            # Bug fix: use a context manager so the handle is always
            # closed (the original leaked the open file).
            with open(self.tag_config_path, 'r') as config_file:
                netronome = json.loads(config_file.read())
        except Exception:
            # Bug fix: the original logged here and then fell through
            # to dereference the unassigned 'netronome' variable,
            # raising NameError; return None instead.
            LOG.error('Failed to read %s', self.tag_config_path)
            return None
        return netronome['netronome_ports']

    def discover_ports(self):
        """Probe the fixed port range and describe each port.

        Bug fix: the original referenced port_index_max and
        port_name_prefix without ``self.`` (NameError) and never
        returned the list it built.
        """
        port_list = []
        for i in range(0, self.port_index_max + 1):
            port_name = self.port_name_prefix + str(i)
            port = dict()
            port["bind_instance"] = None
            port["bind_port"] = None
            port["is_used"] = False
            # PCI slot as reported by ethtool for this netdev.
            port["pci_slot"] = os.popen(
                "ethtool -i %s | grep bus-info | cut -d ' ' -f 5"
                % port_name).read().strip()
            port["port_id"] = i
            port["port_name"] = port_name
            port["product_id"] = "6003"
            port["vender_id"] = "19ee"

            port_list.append(port)
        return port_list
+
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/spdk.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/spdk.py
new file mode 100644
index 0000000..276b9bb
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/modules/spdk.py
@@ -0,0 +1,147 @@
+"""
+Cyborg SPDK driver modules implementation.
+"""
+
+import socket
+from cyborg.accelerator.common import exception
+from cyborg.accelerator.drivers.modules import generic
+from oslo_log import log as logging
+from oslo_config import cfg
+from oslo_concurrency import processutils as putils
+from cyborg.common.i18n import _
+from cyborg.accelerator import configuration
+from cyborg.db.sqlalchemy import api
+
+LOG = logging.getLogger(__name__)
+
# Config options for the SPDK accelerator backend.
accelerator_opts = [
    cfg.StrOpt('spdk_conf_file',
               default='/etc/cyborg/spdk.conf',
               help=_('SPDK conf file to use for the SPDK driver in Cyborg;')),

    cfg.StrOpt('device_type',
               default='NVMe',
               help=_('Default backend device type: NVMe')),

    cfg.IntOpt('queue',
               default=8,
               help=_('Default number of queues')),

    cfg.IntOpt('iops',
               default=1000,
               help=_('Default number of iops')),

    # NOTE(review): the trailing ':' in the next two option names looks
    # like a typo ('bandwidth', 'remoteable'). The name is the config
    # file key, so confirm no deployment relies on it before fixing.
    cfg.IntOpt('bandwidth:',
               default=800,
               help=_('Default bandwidth')),

    cfg.BoolOpt('remoteable:',
                default=False,
                help=_('remoteable is false by default'))

]

CONF = cfg.CONF
CONF.register_opts(accelerator_opts, group=configuration.SHARED_CONF_GROUP)

# py_spdk is optional: when it is missing, the name is bound to None
# and any later attribute access on it fails at call time.
try:
    import py_spdk
except ImportError:
    py_spdk = None
+
+
class SPDKDRIVER(generic.GENERICDRIVER):
    """SPDK accelerator driver skeleton.

    Wraps an SPDK/NVMe backend through the optional py_spdk binding and
    the cyborg DB API; most namespace/controller operations are still
    placeholders.

    NOTE(review): self.ctrlr and self.nsid are read below but never
    assigned in this class — presumably set by a subclass or elsewhere;
    confirm before calling get_controller()/get_allocated_nsid().
    """

    def __init__(self, execute=putils.execute, *args, **kwargs):
        super(SPDKDRIVER, self).__init__(execute, *args, **kwargs)
        self.configuration.append_config_values(accelerator_opts)
        self.hostname = socket.gethostname()
        # Backend label falls back to 'SPDK' when not configured.
        self.driver_type = self.configuration\
            .safe_get('accelerator_backend_name') or 'SPDK'
        self.device_type = self.configuration.safe_get('device_type')
        self.dbconn = api.get_backend()

    def initialize_connection(self, accelerator, connector):
        # Delegates to py_spdk (None when the binding is absent).
        return py_spdk.initialize_connection(accelerator, connector)

    def validate_connection(self, connector):
        # NOTE(review): calls initialize_connection with a single
        # argument — likely a copy/paste slip; verify intent.
        return py_spdk.initialize_connection(connector)

    def destory_db(self):
        # (sic) "destory": public name kept for compatibility.
        if self.dbconn is not None:
            self.dbconn.close()

    def discover_driver(self, driver_type):
        # NOTE(review): HAVE_SPDK is hard-coded to None, so the
        # accelerator_create call below is currently unreachable.
        HAVE_SPDK = None
        if HAVE_SPDK:
            values = {'acc_type': self.driver_type}
            self.dbconn.accelerator_create(None, values)

    def install_driver(self, driver_id, driver_type):
        # Look up the accelerator record and attach it to this host.
        accelerator = self.dbconn.accelerator_query(None, driver_id)
        if accelerator:
            self.initialize_connection(accelerator, None)
            # NOTE(review): do_setup() is called without the context
            # argument that sibling drivers' do_setup(context) takes —
            # verify against the base class signature.
            self.do_setup()
            ctrlr = self.get_controller()
            nsid = self.get_allocated_nsid(ctrlr)
            self.attach_instance(nsid)
        else:
            msg = (_("Could not find %s accelerator") % driver_type)
            raise exception.InvalidAccelerator(msg)

    def uninstall_driver(self, driver_id, driver_type):
        # Detach the namespace currently allocated on the controller.
        ctrlr = self.get_controller()
        nsid = self.get_allocated_nsid(ctrlr)
        self.detach_instance(nsid)
        pass

    def driver_list(self, driver_type):
        return self.dbconn.accelerator_query(None, driver_type)

    def update(self, driver_type):
        pass

    def attach_instance(self, instance_id):
        self.add_ns()
        self.attach_and_detach_ns()
        pass

    def detach_instance(self, instance_id):
        self.delete_ns()
        self.detach_and_detach_ns()
        pass

    def get_controller(self):
        return self.ctrlr

    '''list controllers'''

    def display_controller_list(self):
        pass

    '''create namespace'''

    def add_ns(self):
        pass

    '''delete namespace'''

    def delete_ns(self):
        pass

    '''attach namespace to controller'''

    def attach_and_detach_ns(self):
        pass

    '''detach namespace from controller'''

    def detach_and_detach_ns(self):
        pass

    ''' format namespace or controller'''

    def format_nvm(self):
        pass

    def get_allocated_nsid(self, ctrl):
        return self.nsid
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/nvmf/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/nvmf/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/nvmf/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/nvmf/nvmf.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/nvmf/nvmf.py
new file mode 100644
index 0000000..6e482a1
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/nvmf/nvmf.py
@@ -0,0 +1,113 @@
+"""
+SPDK NVMFDRIVER module implementation.
+"""
+
+from cyborg.accelerator.drivers.spdk.util.pyspdk.nvmf_client import NvmfTgt
+from oslo_log import log as logging
+from cyborg.accelerator.common import exception
+from cyborg.accelerator.drivers.spdk.util import common_fun
+from cyborg.accelerator.drivers.spdk.spdk import SPDKDRIVER
+from cyborg.accelerator.drivers.spdk.util.pyspdk.py_spdk import PySPDK
+
+LOG = logging.getLogger(__name__)
+
+
class NVMFDRIVER(SPDKDRIVER):
    """SPDK nvmf_tgt driver.

    Wraps a running nvmf_tgt server app and exposes discovery plus
    NVMe-oF subsystem management through the SPDK JSON-RPC client.
    """

    SERVER = 'nvmf'

    def __init__(self, *args, **kwargs):
        super(NVMFDRIVER, self).__init__(*args, **kwargs)
        self.servers = common_fun.discover_servers()
        self.py = common_fun.get_py_client(self.SERVER)

    def discover_accelerator(self):
        """Return this server's accelerator info if nvmf_tgt is alive."""
        if common_fun.check_for_setup_error(self.py, self.SERVER):
            return self.get_one_accelerator()

    def get_one_accelerator(self):
        """Collect block devices and nvmf subsystems from nvmf_tgt."""
        acc_client = NvmfTgt(self.py)
        bdevs = acc_client.get_bdevs()              # current blockdev list
        subsystems = acc_client.get_nvmf_subsystems()  # current subsystems
        return {
            'server': self.SERVER,
            'bdevs': bdevs,
            'subsystems': subsystems,
        }

    def install_accelerator(self, driver_id, driver_type):
        pass

    def uninstall_accelerator(self, driver_id, driver_type):
        pass

    def accelerator_list(self):
        return self.get_all_accelerators()

    def get_all_accelerators(self):
        """Collect accelerator info once per live server app."""
        accelerators = []
        # NOTE(review): as in the original, this appends *this* server's
        # info once for every live server app; presumably it should query
        # the matching server instead -- behavior kept pending review.
        for server in self.servers:
            if PySPDK(server).is_alive():
                accelerators.append(self.get_one_accelerator())
        return accelerators

    def update(self, driver_type, **kwargs):
        pass

    def attach_instance(self, instance_id):
        pass

    def detach_instance(self, instance_id):
        pass

    def delete_subsystem(self, nqn):
        """Delete an nvmf subsystem.

        :param nqn: target nqn (ASCII); must be non-empty.
        :raise exception.Invalid: if *nqn* is empty.
        """
        # Bug fix: the original condition was inverted -- it attempted the
        # delete only when nqn was EMPTY and raised for every valid nqn.
        if nqn == "":
            raise exception.Invalid('Delete nvmf subsystem failed.')
        acc_client = NvmfTgt(self.py)
        acc_client.delete_nvmf_subsystem(nqn)

    def construct_subsystem(self,
                            nqn,
                            listen,
                            hosts,
                            serial_number,
                            namespaces
                            ):
        """Add an nvmf subsystem.

        :param nqn: target nqn (ASCII).
        :param listen: comma-separated list of
            'trtype:transport traddr:address trsvcid:port' pairs.
        :param hosts: whitespace-separated list of host nqns.
        :param serial_number: e.g. 'SPDK00000000000001'.
        :param namespaces: whitespace-separated list of namespaces.
        :raise exception.Invalid: if any argument is empty.
        """
        if ((namespaces != '' and listen != '') and
                (hosts != '' and serial_number != '')) and nqn != '':
            acc_client = NvmfTgt(self.py)
            acc_client.construct_nvmf_subsystem(
                nqn, listen, hosts, serial_number, namespaces)
        else:
            raise exception.Invalid('Construct nvmf subsystem failed.')
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/spdk.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/spdk.py
new file mode 100644
index 0000000..c42522f
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/spdk.py
@@ -0,0 +1,75 @@
+"""
+Cyborg SPDK driver modules implementation.
+"""
+
+from oslo_log import log as logging
+LOG = logging.getLogger(__name__)
+
+
class SPDKDRIVER(object):
    """Virtual interface for SPDK-based accelerator drivers.

    Concrete SPDK server apps register themselves by subclassing and
    setting ``SERVER``; :meth:`create` dispatches on that tag.
    """

    @classmethod
    def create(cls, server, *args, **kwargs):
        """Instantiate the subclass whose ``SERVER`` tag equals *server*."""
        for candidate in cls.__subclasses__():
            if candidate.SERVER == server:
                return candidate(*args, **kwargs)
        raise LookupError("Could not find the driver for server %s" % server)

    def __init__(self, *args, **kwargs):
        super(SPDKDRIVER, self).__init__()

    def discover_accelerator(self):
        """Discover a backend accelerator.

        :return: accelerator list.
        """
        raise NotImplementedError('Subclasses must implement this method.')

    def install_accelerator(self, driver_id, driver_type):
        """Install a backend accelerator.

        :param driver_id: driver id.
        :param driver_type: driver type.
        :raise: NotImplementedError.
        """
        raise NotImplementedError('Subclasses must implement this method.')

    def uninstall_accelerator(self, driver_id, driver_type):
        """Uninstall a backend accelerator.

        :param driver_id: driver id.
        :param driver_type: driver type.
        :raise: NotImplementedError.
        """
        raise NotImplementedError('Subclasses must implement this method.')

    def accelerator_list(self):
        """Discover the backend accelerator list.

        :return: accelerator list.
        :raise: NotImplementedError.
        """
        raise NotImplementedError('Subclasses must implement this method.')

    def update(self, driver_type, **kwargs):
        """Update an accelerator.

        :param driver_type: driver type.
        :param kwargs: kwargs.
        :raise: NotImplementedError.
        """
        raise NotImplementedError('Subclasses must implement this method.')

    def attach_instance(self, instance_id):
        """Attach a backend instance.

        :param instance_id: instance id.
        :raise: NotImplementedError.
        """
        raise NotImplementedError('Subclasses must implement this method.')

    def detach_instance(self, instance_id):
        """Detach a backend instance.

        :param instance_id: instance id.
        :raise: NotImplementedError.
        """
        raise NotImplementedError('Subclasses must implement this method.')
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/common_fun.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/common_fun.py
new file mode 100644
index 0000000..34f3e87
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/common_fun.py
@@ -0,0 +1,206 @@
+"""
+Utils for SPDK driver.
+"""
+
+import glob
+import os
+import re
+
+from oslo_config import cfg
+from oslo_log import log as logging
+
+from cyborg.accelerator import configuration
+from cyborg.accelerator.common import exception
+from cyborg.accelerator.drivers.spdk.util.pyspdk.py_spdk import PySPDK
+from cyborg.common.i18n import _
+from pyspdk.nvmf_client import NvmfTgt
+from pyspdk.vhost_client import VhostTgt
+
+LOG = logging.getLogger(__name__)
+
accelerator_opts = [
    cfg.StrOpt('spdk_conf_file',
               default='/etc/cyborg/spdk.conf',
               help=_('SPDK conf file to be used for the SPDK driver')),

    # Bug fix: this option's default (and expected value) is a list, so
    # it must be a ListOpt, not a StrOpt.
    cfg.ListOpt('accelerator_servers',
                default=['vhost', 'nvmf', 'iscsi'],
                help=_('A list of accelerator servers to enable by '
                       'default')),

    cfg.StrOpt('spdk_dir',
               default='/home/wewe/spdk',
               help=_('The SPDK directory is /home/{user_name}/spdk')),

    cfg.StrOpt('device_type',
               default='NVMe',
               help=_('Backend device type is NVMe by default')),

    cfg.BoolOpt('remoteable',
                default=False,
                help=_('Remoteable is false by default'))
]

CONF = cfg.CONF
CONF.register_opts(accelerator_opts, group=configuration.SHARED_CONF_GROUP)

config = configuration.Configuration(accelerator_opts)
config.append_config_values(accelerator_opts)
SERVERS = config.safe_get('accelerator_servers')
# Alternation pattern matching any known server name,
# e.g. "(vhost)|(nvmf)|(iscsi)".
SERVERS_PATTERN = re.compile("|".join("(%s)" % s for s in SERVERS))
SPDK_SERVER_APP_DIR = os.path.join(config.safe_get('spdk_dir'), 'app/')
+
+
def discover_servers():
    """Discover backend servers according to the CONF.

    :return: list of known server names present under SPDK's app/ dir.
    """
    matches = (SERVERS_PATTERN.match(name)
               for name in glob.glob1(SPDK_SERVER_APP_DIR, "*"))
    return list({m.group() for m in matches if m})
+
+
def delete_bdev(py, accelerator, name):
    """Delete the named blockdev via the matching accelerator client.

    :param py: py_client.
    :param accelerator: accelerator name ('vhost' or 'nvmf').
    :param name: blockdev name to be deleted.
    """
    get_accelerator_client(py, accelerator).delete_bdev(name)
+
+
def kill_instance(py, accelerator, sig_name):
    """Send a signal to the server instance.

    :param py: py_client.
    :param accelerator: accelerator name ('vhost' or 'nvmf').
    :param sig_name: signal to send to the server.
    """
    get_accelerator_client(py, accelerator).kill_instance(sig_name)
+
+
def construct_aio_bdev(py, accelerator, filename, name, block_size):
    """Add a bdev with an AIO backend.

    :param py: py_client.
    :param accelerator: accelerator name ('vhost' or 'nvmf').
    :param filename: path to device or file (ex: /dev/sda).
    :param name: block device name.
    :param block_size: block size for this bdev.
    :return: name.
    """
    client = get_accelerator_client(py, accelerator)
    client.construct_aio_bdev(filename, name, block_size)
    return name
+
+
def construct_error_bdev(py, accelerator, basename):
    """Add a bdev with an error-injection backend.

    :param py: py_client.
    :param accelerator: accelerator name ('vhost' or 'nvmf').
    :param basename: path to device or file (ex: /dev/sda).
    """
    get_accelerator_client(py, accelerator).construct_error_bdev(basename)
+
+
def construct_nvme_bdev(py, accelerator, name, trtype, traddr,
                        adrfam, trsvcid, subnqn):
    """Add a bdev with an NVMe backend.

    :param py: py_client.
    :param accelerator: accelerator name ('vhost' or 'nvmf').
    :param name: name of the bdev.
    :param trtype: NVMe-oF target trtype, e.g. rdma, pcie.
    :param traddr: NVMe-oF target address (ip address or BDF).
    :param adrfam: NVMe-oF target adrfam, e.g. ipv4, ipv6, ib, fc,
        intra_host.
    :param trsvcid: NVMe-oF target trsvcid, e.g. a port number.
    :param subnqn: NVMe-oF target subnqn.
    :return: name.
    """
    client = get_accelerator_client(py, accelerator)
    client.construct_nvme_bdev(name, trtype, traddr, adrfam,
                               trsvcid, subnqn)
    return name
+
+
def construct_null_bdev(py, accelerator, name, total_size, block_size):
    """Add a bdev with a null backend.

    :param py: py_client.
    :param accelerator: accelerator name ('vhost' or 'nvmf').
    :param name: block device name.
    :param total_size: size of the null bdev in MB (int > 0).
    :param block_size: block size for this bdev.
    :return: name.
    """
    client = get_accelerator_client(py, accelerator)
    client.construct_null_bdev(name, total_size, block_size)
    return name
+
+
def get_py_client(server):
    """Return a PySPDK client bound to *server*.

    Doc fix: the original docstring claimed a Boolean return; the
    function returns a PySPDK instance.

    :param server: server name; must be one of the configured SERVERS.
    :return: a PySPDK instance for *server*.
    :raise InvalidAccelerator: if *server* is not a known server.
    """
    if server not in SERVERS:
        msg = (_("Could not find %s accelerator") % server)
        raise exception.InvalidAccelerator(msg)
    return PySPDK(server)
+
+
def check_for_setup_error(py, server):
    """Verify the server app is running.

    :param py: py_client.
    :param server: server name, used in the error message.
    :return: True when the server process is alive.
    :raise AcceleratorException: when the server is down.
    """
    if not py.is_alive():
        msg = (_("%s accelerator is down") % server)
        raise exception.AcceleratorException(msg)
    return True
+
+
def get_accelerator_client(py, accelerator):
    """Return the client that talks to the given accelerator server.

    :param py: py_client.
    :param accelerator: accelerator name ('vhost' or 'nvmf').
    :return: acc_client.
    :raise InvalidAccelerator: for any other accelerator name.
    """
    if accelerator == 'vhost':
        return VhostTgt(py)
    elif accelerator == 'nvmf':
        return NvmfTgt(py)
    # Bug fix: the original formatted "%(acc_client)" (no conversion
    # type) against None instead of a mapping, which itself raised at
    # error time; report the unknown accelerator name properly.
    exc_msg = (_("accelerator_client %(acc_client)s is missing")
               % {'acc_client': accelerator})
    raise exception.InvalidAccelerator(exc_msg)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/nvmf_client.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/nvmf_client.py
new file mode 100644
index 0000000..840087f
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/nvmf_client.py
@@ -0,0 +1,119 @@
+import json
+
+
class NvmfTgt(object):
    """JSON-RPC client wrapper for the SPDK nvmf_tgt app.

    Every call goes through ``py.exec_rpc`` to the RPC listener at
    10.0.2.15 (hard-coded; TODO make the address configurable).

    Fixes over the original: Python-2-only ``print`` statements replaced
    with the function form (valid on py2 and py3), and the
    ``construct_nvme_bdev`` argument list built as flag/value pairs.
    """

    def __init__(self, py):
        super(NvmfTgt, self).__init__()
        self.py = py

    def get_rpc_methods(self):
        """Return the RPC methods supported by the server."""
        return self._get_json_objs('get_rpc_methods', '10.0.2.15')

    def get_bdevs(self):
        """Return the current blockdev list."""
        return self._get_json_objs('get_bdevs', '10.0.2.15')

    def delete_bdev(self, name):
        res = self.py.exec_rpc('delete_bdev', '10.0.2.15', sub_args=[name])
        print(res)

    def kill_instance(self, sig_name):
        res = self.py.exec_rpc('kill_instance', '10.0.2.15',
                               sub_args=[sig_name])
        print(res)

    def construct_aio_bdev(self, filename, name, block_size):
        res = self.py.exec_rpc(
            'construct_aio_bdev',
            '10.0.2.15',
            sub_args=[filename, name, str(block_size)])
        print(res)

    def construct_error_bdev(self, basename):
        res = self.py.exec_rpc(
            'construct_error_bdev',
            '10.0.2.15',
            sub_args=[basename])
        print(res)

    def construct_nvme_bdev(
            self,
            name,
            trtype,
            traddr,
            adrfam=None,
            trsvcid=None,
            subnqn=None):
        """Create an NVMe bdev; return the raw RPC response.

        Bug fix: the original's ``insert`` calls interleaved values and
        flags as ``[-b, name, trtype, traddr, -t, -a]``; rpc.py expects
        each flag immediately followed by its value.
        """
        sub_args = ["-b", name, "-t", trtype, "-a", traddr]
        if adrfam is not None:
            sub_args += ["-f", adrfam]
        if trsvcid is not None:
            sub_args += ["-s", trsvcid]
        if subnqn is not None:
            sub_args += ["-n", subnqn]
        return self.py.exec_rpc(
            'construct_nvme_bdev',
            '10.0.2.15',
            sub_args=sub_args)

    def construct_null_bdev(self, name, total_size, block_size):
        return self.py.exec_rpc(
            'construct_null_bdev',
            '10.0.2.15',
            sub_args=[name, str(total_size), str(block_size)])

    def construct_malloc_bdev(self, total_size, block_size):
        res = self.py.exec_rpc(
            'construct_malloc_bdev',
            '10.0.2.15',
            sub_args=[str(total_size), str(block_size)])
        print(res)

    def delete_nvmf_subsystem(self, nqn):
        res = self.py.exec_rpc(
            'delete_nvmf_subsystem',
            '10.0.2.15',
            sub_args=[nqn])
        print(res)

    def construct_nvmf_subsystem(
            self,
            nqn,
            listen,
            hosts,
            serial_number,
            namespaces):
        res = self.py.exec_rpc(
            'construct_nvmf_subsystem',
            '10.0.2.15',
            sub_args=[nqn, listen, hosts, serial_number, namespaces])
        print(res)

    def get_nvmf_subsystems(self):
        """Return the configured nvmf subsystems."""
        return self._get_json_objs('get_nvmf_subsystems', '10.0.2.15')

    def _get_json_objs(self, method, server_ip):
        # Decode the raw RPC response into Python objects.
        res = self.py.exec_rpc(method, server_ip)
        return json.loads(res)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/py_spdk.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/py_spdk.py
new file mode 100644
index 0000000..a298c18
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/py_spdk.py
@@ -0,0 +1,82 @@
+import psutil
+import re
+import os
+import subprocess
+
+
class PySPDK(object):
    """Helper to start, monitor and issue JSON-RPC calls to an SPDK app.

    Fixes over the original: Python-2-only ``print`` statements replaced
    with the function form, and the duplicated not-found print in
    ``_get_process_id`` reduced to one.
    """

    def __init__(self, pname):
        super(PySPDK, self).__init__()
        # pid is resolved lazily by _get_process_id().
        self.pid = None
        self.pname = pname

    def start_server(self, spdk_dir, server_name):
        """Start *server_name* from the SPDK tree if not already running."""
        if not self.is_alive():
            self.init_hugepages(spdk_dir)
            server_dir = os.path.join(spdk_dir, 'app/')
            file_dir = self._search_file(server_dir, server_name)
            print(file_dir)
            os.chdir(file_dir)
            p = subprocess.Popen(
                'sudo ./%s' % server_name,
                shell=True, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            out, err = p.communicate()
            return out

    def init_hugepages(self, spdk_dir):
        """Run SPDK's scripts/setup.sh to reserve hugepages."""
        huge_dir = os.path.join(spdk_dir, 'scripts/')
        file_dir = self._search_file(huge_dir, 'setup.sh')
        print(file_dir)
        os.chdir(file_dir)
        p = subprocess.Popen(
            'sudo ./setup.sh',
            shell=True, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        out, err = p.communicate()
        return out

    @staticmethod
    def _search_file(spdk_dir, file_name):
        """Return the first directory under *spdk_dir* holding *file_name*.

        Returns None when the file is not found anywhere in the tree.
        """
        for dirpath, dirnames, filenames in os.walk(spdk_dir):
            if file_name in filenames:
                return dirpath

    def _get_process_id(self):
        """Look up the server's pid by process name; None when absent."""
        for proc in psutil.process_iter():
            try:
                pinfo = proc.as_dict(attrs=['pid', 'cmdline'])
                if re.search(self.pname, str(pinfo.get('cmdline'))):
                    self.pid = pinfo.get('pid')
                    return self.pid
            except psutil.NoSuchProcess:
                # Process vanished while iterating; keep scanning.
                print("NoSuchProcess:%s" % self.pname)
        print("NoSuchProcess:%s" % self.pname)
        return self.pid

    def is_alive(self):
        """Return True when the server process exists and is running."""
        self.pid = self._get_process_id()
        if self.pid:
            return psutil.Process(self.pid).is_running()
        return False

    @staticmethod
    def exec_rpc(method, server='127.0.0.1', port=5260, sub_args=None):
        """Invoke ``./rpc.py`` for *method* against server:port.

        :return: the command's stdout (raw bytes/str).
        """
        exec_cmd = ["./rpc.py", "-s", server, "-p", str(port), method]
        exec_cmd.extend(sub_args or [])
        p = subprocess.Popen(
            exec_cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        out, err = p.communicate()
        return out
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/vhost_client.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/vhost_client.py
new file mode 100644
index 0000000..63bce70
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/util/pyspdk/vhost_client.py
@@ -0,0 +1,121 @@
+import json
+
+
class VhostTgt(object):
    """JSON-RPC client wrapper for the SPDK vhost app.

    Every call goes through ``py.exec_rpc`` to the local RPC listener at
    127.0.0.1 (hard-coded; TODO make the address configurable).

    Fixes over the original: Python-2-only ``print`` statements replaced
    with the function form; ``construct_nvme_bdev`` arguments built as
    flag/value pairs; ``construct_malloc_bdev`` now targets 127.0.0.1
    like every other vhost call (it used 10.0.2.15).
    """

    def __init__(self, py):
        super(VhostTgt, self).__init__()
        self.py = py

    def get_rpc_methods(self):
        """Return the RPC methods supported by the server."""
        return self._get_json_objs('get_rpc_methods', '127.0.0.1')

    def get_scsi_devices(self):
        """Return the current SCSI device list."""
        return self._get_json_objs('get_scsi_devices', '127.0.0.1')

    def get_luns(self):
        """Return the active LUNs."""
        return self._get_json_objs('get_luns', '127.0.0.1')

    def get_interfaces(self):
        """Return the current interface list."""
        return self._get_json_objs('get_interfaces', '127.0.0.1')

    def add_ip_address(self, ifc_index, ip_addr):
        return self.py.exec_rpc(
            'add_ip_address',
            '127.0.0.1',
            sub_args=[ifc_index, ip_addr])

    def delete_ip_address(self, ifc_index, ip_addr):
        return self.py.exec_rpc(
            'delete_ip_address',
            '127.0.0.1',
            sub_args=[ifc_index, ip_addr])

    def get_bdevs(self):
        """Return the current blockdev list."""
        return self._get_json_objs('get_bdevs', '127.0.0.1')

    def delete_bdev(self, name):
        res = self.py.exec_rpc('delete_bdev', '127.0.0.1', sub_args=[name])
        print(res)

    def kill_instance(self, sig_name):
        res = self.py.exec_rpc('kill_instance', '127.0.0.1',
                               sub_args=[sig_name])
        print(res)

    def construct_aio_bdev(self, filename, name, block_size):
        res = self.py.exec_rpc(
            'construct_aio_bdev',
            '127.0.0.1',
            sub_args=[filename, name, str(block_size)])
        print(res)

    def construct_error_bdev(self, basename):
        res = self.py.exec_rpc(
            'construct_error_bdev',
            '127.0.0.1',
            sub_args=[basename])
        print(res)

    def construct_nvme_bdev(
            self,
            name,
            trtype,
            traddr,
            adrfam=None,
            trsvcid=None,
            subnqn=None):
        """Create an NVMe bdev; return the raw RPC response.

        Bug fix: the original's ``insert`` calls interleaved values and
        flags as ``[-b, name, trtype, traddr, -t, -a]``; rpc.py expects
        each flag immediately followed by its value.
        """
        sub_args = ["-b", name, "-t", trtype, "-a", traddr]
        if adrfam is not None:
            sub_args += ["-f", adrfam]
        if trsvcid is not None:
            sub_args += ["-s", trsvcid]
        if subnqn is not None:
            sub_args += ["-n", subnqn]
        return self.py.exec_rpc(
            'construct_nvme_bdev',
            '127.0.0.1',
            sub_args=sub_args)

    def construct_null_bdev(self, name, total_size, block_size):
        return self.py.exec_rpc(
            'construct_null_bdev',
            '127.0.0.1',
            sub_args=[name, str(total_size), str(block_size)])

    def construct_malloc_bdev(self, total_size, block_size):
        # Consistency fix: every other vhost call targets 127.0.0.1; the
        # original sent this one to 10.0.2.15 (copy-paste from the nvmf
        # client) -- presumably unintended; confirm against deployment.
        res = self.py.exec_rpc(
            'construct_malloc_bdev',
            '127.0.0.1',
            sub_args=[str(total_size), str(block_size)])
        print(res)

    def _get_json_objs(self, method, server_ip):
        # Decode the raw RPC response into Python objects.
        res = self.py.exec_rpc(method, server_ip)
        return json.loads(res)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/vhost/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/vhost/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/vhost/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/vhost/vhost.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/vhost/vhost.py
new file mode 100644
index 0000000..b7aef33
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/accelerator/drivers/spdk/vhost/vhost.py
@@ -0,0 +1,92 @@
+"""
+SPDK VHOSTDRIVER module implementation.
+"""
+
+from cyborg.accelerator.drivers.spdk.util.pyspdk.vhost_client import VhostTgt
+from oslo_log import log as logging
+from cyborg.accelerator.drivers.spdk.util import common_fun
+from cyborg.accelerator.drivers.spdk.spdk import SPDKDRIVER
+from cyborg.accelerator.drivers.spdk.util.pyspdk.py_spdk import PySPDK
+
+LOG = logging.getLogger(__name__)
+
+
class VHOSTDRIVER(SPDKDRIVER):
    """Driver wrapping a running SPDK vhost server app."""

    SERVER = 'vhost'

    def __init__(self, *args, **kwargs):
        super(VHOSTDRIVER, self).__init__(*args, **kwargs)
        self.servers = common_fun.discover_servers()
        self.py = common_fun.get_py_client(self.SERVER)

    def discover_accelerator(self):
        """Return this server's accelerator info if vhost is alive."""
        if common_fun.check_for_setup_error(self.py, self.SERVER):
            return self.get_one_accelerator()

    def get_one_accelerator(self):
        """Collect bdevs, SCSI devices, LUNs and interfaces from vhost."""
        acc_client = VhostTgt(self.py)
        bdevs = acc_client.get_bdevs()              # current blockdev list
        scsi_devices = acc_client.get_scsi_devices()  # SCSI devices
        luns = acc_client.get_luns()                # active LUNs
        interfaces = acc_client.get_interfaces()    # current interfaces
        return {
            'server': self.SERVER,
            'bdevs': bdevs,
            'scsi_devices': scsi_devices,
            'luns': luns,
            'interfaces': interfaces,
        }

    def install_accelerator(self, driver_id, driver_type):
        pass

    def uninstall_accelerator(self, driver_id, driver_type):
        pass

    def accelerator_list(self):
        return self.get_all_accelerators()

    def get_all_accelerators(self):
        """Collect accelerator info once per live server app."""
        accelerators = []
        for server in self.servers:
            if PySPDK(server).is_alive():
                accelerators.append(self.get_one_accelerator())
        return accelerators

    def update(self, driver_type, **kwargs):
        pass

    def attach_instance(self, instance_id):
        pass

    def detach_instance(self, instance_id):
        pass

    def add_ip_address(self, ifc_index, ip_addr):
        """Add an IP address.

        :param ifc_index: ifc index of the nic device.
        :param ip_addr: ip address to add.
        :return: raw RPC response.
        """
        return VhostTgt(self.py).add_ip_address(ifc_index, ip_addr)

    def delete_ip_address(self, ifc_index, ip_addr):
        """Delete an IP address.

        :param ifc_index: ifc index of the nic device.
        :param ip_addr: ip address to remove.
        :return: raw RPC response.
        """
        return VhostTgt(self.py).delete_ip_address(ifc_index, ip_addr)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/manager.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/manager.py
new file mode 100755
index 0000000..5df12d0
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/manager.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+import oslo_messaging as messaging
+from oslo_service import periodic_task
+
+from cyborg.accelerator.drivers.modules import netronome
+from cyborg.accelerator.drivers.fpga.base import FPGADriver
+from cyborg.accelerator.drivers.gpu.base import GPUDriver
+from cyborg.agent.resource_tracker import ResourceTracker
+from cyborg.conductor import rpcapi as conductor_api
+
+from cyborg import objects
+
+from cyborg.conf import CONF
+from oslo_log import log as logging
+
+
+LOG = logging.getLogger(__name__)
+
class AgentManager(periodic_task.PeriodicTasks):
    """Cyborg Agent manager main class."""

    RPC_API_VERSION = '1.0'
    target = messaging.Target(version=RPC_API_VERSION)

    def __init__(self, topic, host=None):
        super(AgentManager, self).__init__(CONF)
        # NOTE: the local conductor API only works on the same node;
        # should become a real RPC call to the conductor.
        self.conductor_api = conductor_api.ConductorAPI()
        self.topic = topic
        self.host = host or CONF.host
        self.fpga_driver = FPGADriver()
        self._rt = ResourceTracker(host, self.conductor_api)
        self.gpu_driver = GPUDriver()

    def periodic_tasks(self, context, raise_on_error=False):
        """Run all periodic tasks registered for this manager."""
        return self.run_periodic_tasks(context,
                                       raise_on_error=raise_on_error)

    def hardware_list(self, context, values):
        """List installed hardware (not implemented yet)."""
        pass

    def fpga_program(self, context, accelerator, image):
        """Program an FPGA region; *image* may be a URL or a local file.

        TODO: fetch the image from glance and add claim/rollback logic.
        """
        raise NotImplementedError()

    @periodic_task.periodic_task(run_immediately=True)
    def update_available_resource(self, context, startup=True):
        """Refresh all kinds of accelerator resources from their drivers."""
        driver = netronome.NETRONOMEDRIVER()
        port_resource = driver.get_available_resource()
        if port_resource:
            self.conductor_api.port_bulk_create(context, port_resource)
        self._rt.update_usage(context)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/resource_tracker.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/resource_tracker.py
new file mode 100644
index 0000000..d17646c
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/resource_tracker.py
@@ -0,0 +1,206 @@
+# Copyright (c) 2018 Intel.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Track resources like FPGA GPU and QAT for a host. Provides the
+conductor with useful information about availability through the accelerator
+model.
+"""
+
+from oslo_log import log as logging
+from oslo_messaging.rpc.client import RemoteError
+from oslo_utils import uuidutils
+
+from cyborg.accelerator.drivers.fpga.base import FPGADriver
+from cyborg.accelerator.drivers.gpu.base import GPUDriver
+from cyborg.common import utils
+from cyborg import objects
+
+
+LOG = logging.getLogger(__name__)
+
# Lock name guarding agent-side resource bookkeeping.
AGENT_RESOURCE_SEMAPHORE = "agent_resources"

# Version stamped onto every Deployable record this tracker creates.
DEPLOYABLE_VERSION = "1.0"

# Maps Deployable field name -> driver-reported device dict key.
# TODO: align the driver field names so this translation goes away.
DEPLOYABLE_HOST_MAPS = {"assignable": "assignable",
                        "pcie_address": "devices",
                        "board": "product_id",
                        "type": "function",
                        "vendor": "vendor_id",
                        "name": "name"}
+
+
class ResourceTracker(object):
    """Agent helper that tracks accelerator resources on one host.

    Synchronizes discovered FPGA/GPU devices with Deployable DB records
    through the conductor API.
    """

    def __init__(self, host, cond_api):
        # FIXME (Shaohe) local cache for Accelerator.
        # Will fix it in next release.
        self.fpgas = None
        self.host = host
        self.conductor_api = cond_api
        self.fpga_driver = FPGADriver()
        self.gpu_driver = GPUDriver()

    @utils.synchronized(AGENT_RESOURCE_SEMAPHORE)
    def claim(self, context):
        pass

    def _fpga_compare_and_update(self, host_dev, accelerator):
        """Copy changed mapped fields from *host_dev* onto *accelerator*.

        :return: True if any field was updated.
        """
        need_updated = False
        for k, v in DEPLOYABLE_HOST_MAPS.items():
            if accelerator[k] != host_dev[v]:
                need_updated = True
                accelerator[k] = host_dev[v]
        return need_updated

    def _gen_deployable_from_host_dev(self, host_dev):
        """Build a new Deployable dict for *host_dev* on this host."""
        dep = {k: host_dev[v] for k, v in DEPLOYABLE_HOST_MAPS.items()}
        dep["host"] = self.host
        dep["version"] = DEPLOYABLE_VERSION
        dep["availability"] = "free"
        dep["uuid"] = uuidutils.generate_uuid()
        return dep

    @utils.synchronized(AGENT_RESOURCE_SEMAPHORE)
    def update_usage(self, context):
        """Update the resource usage and stats after a change in an
        instance.
        """
        def create_deployable(fpgas, bdf, parent_uuid=None):
            # Persist one device as a Deployable via the conductor.
            fpga = fpgas[bdf]
            dep = self._gen_deployable_from_host_dev(fpga)
            dep["parent_uuid"] = parent_uuid
            obj_dep = objects.Deployable(context, **dep)
            return self.conductor_api.deployable_create(context, obj_dep)

        self.update_gpu_usage(context)
        # NOTE(Shaohe Feng) need more agreement on how to keep consistency.
        fpgas = self._get_fpga_devices()
        bdfs = set(fpgas.keys())
        deployables = self.conductor_api.deployable_get_by_host(
            context, self.host)

        # NOTE(Shaohe Feng) when no "pcie_address" in deployable?
        accls = dict([(v["pcie_address"], v) for v in deployables
                      if v["type"] == "FPGA"])
        accl_bdfs = set(accls.keys())

        # Update records for devices present on both sides.
        for mutual in accl_bdfs & bdfs:
            accl = accls[mutual]
            if self._fpga_compare_and_update(fpgas[mutual], accl):
                try:
                    self.conductor_api.deployable_update(context, accl)
                except RemoteError as e:
                    LOG.error(e)
        # Create records for newly discovered devices, PFs first so VFs
        # can reference their parent's uuid.
        new = bdfs - accl_bdfs
        new_pf = set([n for n in new if fpgas[n]["function"] == "pf"])
        for n in new_pf:
            new_dep = create_deployable(fpgas, n)
            accls[n] = new_dep
            sub_vf = set()
            # Bug fix: the original tested `"regions" in n` -- a substring
            # check against the BDF string -- so PF sub-regions were never
            # attached here; the regions live in the device dict fpgas[n].
            if "regions" in fpgas[n]:
                sub_vf = set([sub["devices"] for sub in fpgas[n]["regions"]])
            for vf in sub_vf & new:
                new_dep = create_deployable(fpgas, vf, new_dep["uuid"])
                accls[vf] = new_dep
                new.remove(vf)
        for n in new - new_pf:
            p_bdf = fpgas[n]["parent_devices"]
            p_accl = accls[p_bdf]
            p_uuid = p_accl["uuid"]
            create_deployable(fpgas, n, p_uuid)

        # Remove records whose device disappeared from the host.
        for obsolete in accl_bdfs - bdfs:
            try:
                self.conductor_api.deployable_delete(context, accls[obsolete])
            except RemoteError as e:
                LOG.error(e)
            del accls[obsolete]

    def _get_fpga_devices(self):
        """Return {bdf: device_dict} for every FPGA PF/VF on the host."""

        def form_dict(devices, fpgas):
            # Flatten the PF/region tree into one bdf-keyed dict.
            for v in devices:
                fpgas[v["devices"]] = v
                if "regions" in v:
                    form_dict(v["regions"], fpgas)

        fpgas = {}
        for vendor in self.fpga_driver.discover_vendors():
            driver = self.fpga_driver.create(vendor)
            form_dict(driver.discover(), fpgas)
        return fpgas

    def update_gpu_usage(self, context):
        """Update the GPU resource usage and stats after a change in an
        instance; counterpart of update_usage(), which handles FPGAs.
        """
        # (Removed the original's unused inner create_deployable helper.)
        gpus = self._get_gpu_devices()
        deployables = self.conductor_api.deployable_get_by_host(
            context, self.host)

        accls = dict([(v["pcie_address"], v) for v in deployables
                      if v["type"] == "GPU"])
        all_gpus = dict([(v["devices"], v) for v in gpus])

        # Create records for newly discovered GPUs (GPUs have no parent).
        new = set(all_gpus.keys()) - set(accls.keys())
        for n in [all_gpus[key] for key in new]:
            dep = self._gen_deployable_from_host_dev(n)
            dep["parent_uuid"] = None
            obj_dep = objects.Deployable(context, **dep)
            self.conductor_api.deployable_create(context, obj_dep)

        # Remove records whose GPU disappeared from the host.
        not_exists = set(accls.keys()) - set(all_gpus.keys())
        for obsolete in not_exists:
            try:
                self.conductor_api.deployable_delete(context, accls[obsolete])
            except RemoteError as e:
                LOG.error(e)
            del accls[obsolete]

    def _get_gpu_devices(self):
        """Return a flat list of GPU device dicts from every vendor driver."""
        gpus = []
        for vendor in self.gpu_driver.discover_vendors():
            driver = self.gpu_driver.create(vendor)
            if driver:
                gpus.extend(driver.discover())
        return gpus
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/rpcapi.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/rpcapi.py
new file mode 100644
index 0000000..f683dc0
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/agent/rpcapi.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Client side of the conductor RPC API."""
+
+from oslo_config import cfg
+import oslo_messaging as messaging
+
+from cyborg.common import constants
+from cyborg.common import rpc
+from cyborg.objects import base as objects_base
+
+
+CONF = cfg.CONF
+
+
class AgentAPI(object):
    """Client-side proxy for the Agent RPC API.

    API version history:

    | 1.0 - Initial version.

    """

    RPC_API_VERSION = '1.0'

    def __init__(self, topic=None):
        """Set up the RPC client targeting the agent topic.

        :param topic: messaging topic; defaults to constants.AGENT_TOPIC.
        """
        super(AgentAPI, self).__init__()
        self.topic = topic or constants.AGENT_TOPIC
        rpc_target = messaging.Target(topic=self.topic, version='1.0')
        obj_serializer = objects_base.CyborgObjectSerializer()
        self.client = rpc.get_client(rpc_target,
                                     version_cap=self.RPC_API_VERSION,
                                     serializer=obj_serializer)

    def hardware_list(self, context, values):
        """Signal the agent to find local hardware."""
        # Not implemented yet; kept as a no-op placeholder.
        pass
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/app.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/app.py
new file mode 100644
index 0000000..95862da
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/app.py
@@ -0,0 +1,66 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+
+from oslo_config import cfg
+
+from cyborg.api import config
+from cyborg.api import hooks
+from cyborg.api import middleware
+
+
def get_pecan_config():
    """Load the pecan configuration from the cyborg.api.config module."""
    # __file__ may point at cached bytecode; always read the .py source.
    config_path = config.__file__.replace('.pyc', '.py')
    return pecan.configuration.conf_from_file(config_path)
+
+
def setup_app(pecan_config=None, extra_hooks=None):
    """Create and configure the Cyborg WSGI application.

    :param pecan_config: a pecan configuration object; when None, the
        default configuration from cyborg.api.config is loaded.
    :param extra_hooks: optional list of extra pecan hooks to install.
    :returns: the WSGI app wrapped with keystone auth middleware.
    """
    # BUG FIX: resolve the configuration before it is used. The original
    # code read pecan_config.app.acl_public_routes while building the
    # hooks, *before* the ``if not pecan_config`` fallback, so calling
    # setup_app() without a config raised AttributeError on None.
    if not pecan_config:
        pecan_config = get_pecan_config()

    app_hooks = [hooks.ConfigHook(),
                 hooks.ConductorAPIHook(),
                 hooks.ContextHook(pecan_config.app.acl_public_routes),
                 hooks.PublicUrlHook()]
    if extra_hooks:
        app_hooks.extend(extra_hooks)

    pecan.configuration.set_config(dict(pecan_config), overwrite=True)

    app = pecan.make_app(
        pecan_config.app.root,
        static_root=pecan_config.app.static_root,
        debug=False,
        force_canonical=getattr(pecan_config.app, 'force_canonical', True),
        hooks=app_hooks,
        wrap_app=middleware.ParsableErrorMiddleware
    )

    # Auth is applied outside pecan so the configured public routes can
    # bypass token validation.
    app = middleware.AuthTokenMiddleware(
        app, dict(cfg.CONF),
        public_api_routes=pecan_config.app.acl_public_routes)

    return app
+
+
class VersionSelectorApplication(object):
    """WSGI entry point that dispatches every request to the v1 app."""

    def __init__(self):
        # Only one API version exists today, so build it eagerly.
        self.v1 = setup_app(pecan_config=get_pecan_config())

    def __call__(self, environ, start_response):
        return self.v1(environ, start_response)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/config.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/config.py
new file mode 100644
index 0000000..32a0d27
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/config.py
@@ -0,0 +1,40 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# Server Specific Configurations
+# See https://pecan.readthedocs.org/en/latest/configuration.html#server-configuration # noqa
server = {
    # NOTE(review): pecan appears to accept the port as a string here;
    # confirm before changing to an int.
    'port': '6666',
    'host': '0.0.0.0'
}

# Pecan Application Configurations
# See https://pecan.readthedocs.org/en/latest/configuration.html#application-configuration # noqa
app = {
    # Dotted path of the root controller used for request routing.
    'root': 'cyborg.api.controllers.root.RootController',
    'modules': ['cyborg.api'],
    'static_root': '%(confdir)s/public',
    'debug': False,
    # Routes reachable without a keystone token (consumed by
    # AuthTokenMiddleware and ContextHook in cyborg.api.app.setup_app).
    'acl_public_routes': [
        '/',
        '/v1'
    ]
}

# WSME Configurations
# See https://wsme.readthedocs.org/en/latest/integrate.html#configuration
wsme = {
    'debug': False
}
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/base.py
new file mode 100644
index 0000000..9131d3a
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/base.py
@@ -0,0 +1,33 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+import wsme
+from wsme import types as wtypes
+
+
class APIBase(wtypes.Base):
    """Base type for all Cyborg API representations.

    Provides the common audit timestamps and dict rendering shared by
    every API object.
    """

    created_at = wsme.wsattr(datetime.datetime, readonly=True)
    """The time in UTC at which the object is created"""

    updated_at = wsme.wsattr(datetime.datetime, readonly=True)
    """The time in UTC at which the object is updated"""

    def as_dict(self):
        """Render this object as a dict of its fields.

        Attributes that are missing or still wsme.Unset are omitted.
        """
        # Idiomatic dict comprehension instead of dict(<genexp>).
        return {k: getattr(self, k)
                for k in self.fields
                if hasattr(self, k) and getattr(self, k) != wsme.Unset}
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/link.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/link.py
new file mode 100644
index 0000000..fe39c69
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/link.py
@@ -0,0 +1,48 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+from wsme import types as wtypes
+
+from cyborg.api.controllers import base
+
+
def build_url(resource, resource_args, bookmark=False, base_url=None):
    """Construct a URL for *resource* under the given base URL.

    :param resource: resource collection name (e.g. 'accelerators').
    :param resource_args: trailing path segment, or a query string
        starting with '?'.
    :param bookmark: when True, omit the '/v1' version prefix.
    :param base_url: base URL; defaults to the request's public URL.
    """
    if base_url is None:
        base_url = pecan.request.public_url

    prefix = base_url if bookmark else base_url + '/v1'
    # A query string attaches directly; a path segment needs a slash.
    separator = '' if resource_args.startswith('?') else '/'
    return '%s/%s%s%s' % (prefix, resource, separator, resource_args)
+
+
class Link(base.APIBase):
    """A link representation."""

    href = wtypes.text
    """The url of a link."""

    rel = wtypes.text
    """The name of a link."""

    type = wtypes.text
    """Indicates the type of document/link."""

    @staticmethod
    def make_link(rel_name, url, resource, resource_args,
                  bookmark=False, type=wtypes.Unset):
        """Build a Link pointing at *resource* under base URL *url*."""
        return Link(
            href=build_url(resource, resource_args,
                           bookmark=bookmark, base_url=url),
            rel=rel_name,
            type=type)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/root.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/root.py
new file mode 100644
index 0000000..3361bfe
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/root.py
@@ -0,0 +1,72 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+from pecan import rest
+from wsme import types as wtypes
+
+from cyborg.api.controllers import base
+from cyborg.api.controllers import v1
+from cyborg.api import expose
+
+
+VERSION1 = 'v1'
+
+
class Root(base.APIBase):
    name = wtypes.text
    """The name of the API"""

    description = wtypes.text
    """Some information about this API"""

    @staticmethod
    def convert():
        """Build the root document describing this API."""
        api_root = Root()
        api_root.name = 'OpenStack Cyborg API'
        api_root.description = (
            'Cyborg (previously known as Nomad) is an '
            'OpenStack project that aims to provide a general '
            'purpose management framework for acceleration '
            'resources (i.e. various types of accelerators '
            'such as Crypto cards, GPU, FPGA, NVMe/NOF SSDs, '
            'ODP, DPDK/SPDK and so on).')
        return api_root
+
+
class RootController(rest.RestController):
    _versions = [VERSION1]
    """All supported API versions"""

    _default_version = VERSION1
    """The default API version"""

    # v1 sub-controller; pecan routes /v1/* here.
    v1 = v1.Controller()

    @expose.expose(Root)
    def get(self):
        """Return the root document describing the API."""
        return Root.convert()

    @pecan.expose()
    def _route(self, args, request=None):
        """Overrides the default routing behavior.

        It redirects the request to the default version of the cyborg API
        if the version number is not specified in the url.
        """

        # NOTE(review): args[0] raises IndexError on an empty path list;
        # presumably pecan always supplies at least one segment -- confirm.
        if args[0] and args[0] not in self._versions:
            args = [self._default_version] + args
        # NOTE(review): 'request' is accepted but not forwarded to
        # super()._route(); verify against the pecan version in use.
        return super(RootController, self)._route(args)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/__init__.py
new file mode 100644
index 0000000..661e7a0
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/__init__.py
@@ -0,0 +1,73 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Version 1 of the Cyborg API"""
+
+import pecan
+from pecan import rest
+from wsme import types as wtypes
+
+from cyborg.api.controllers import base
+from cyborg.api.controllers import link
+from cyborg.api.controllers.v1 import accelerators
+from cyborg.api.controllers.v1 import ports
+from cyborg.api.controllers.v1 import deployables
+from cyborg.api import expose
+
+
class V1(base.APIBase):
    """The representation of the version 1 of the API."""

    id = wtypes.text
    """The ID of the version"""

    accelerator = [link.Link]
    """Links to the accelerator resource"""

    port = [link.Link]
    """Links to the port resource"""

    deployable = [link.Link]
    """Links to the deployable resource"""

    @staticmethod
    def convert():
        """Build the version document with links to each v1 resource."""
        v1 = V1()
        v1.id = 'v1'
        v1.accelerator = [
            link.Link.make_link('self', pecan.request.public_url,
                                'accelerator', ''),
            link.Link.make_link('bookmark', pecan.request.public_url,
                                'accelerator', '', bookmark=True)
        ]
        v1.port = [
            link.Link.make_link('self', pecan.request.public_url,
                                'port', ''),
            link.Link.make_link('bookmark', pecan.request.public_url,
                                'port', '', bookmark=True)
        ]
        # The v1 controller also routes a deployables sub-controller,
        # which was previously not advertised here; add its links for
        # discoverability (backward-compatible addition).
        v1.deployable = [
            link.Link.make_link('self', pecan.request.public_url,
                                'deployable', ''),
            link.Link.make_link('bookmark', pecan.request.public_url,
                                'deployable', '', bookmark=True)
        ]
        return v1
+
+
class Controller(rest.RestController):
    """Version 1 API controller root"""

    # Sub-controllers; pecan routes /v1/<name> to the matching attribute.
    accelerators = accelerators.AcceleratorsController()
    ports = ports.PortsController()
    deployables = deployables.DeployablesController()

    @expose.expose(V1)
    def get(self):
        """Return the v1 version document."""
        return V1.convert()


__all__ = ('Controller',)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/accelerators.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/accelerators.py
new file mode 100644
index 0000000..e31d580
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/accelerators.py
@@ -0,0 +1,233 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+from pecan import rest
+from six.moves import http_client
+import wsme
+from wsme import types as wtypes
+
+from cyborg.api.controllers import base
+from cyborg.api.controllers import link
+from cyborg.api.controllers.v1 import types
+from cyborg.api import expose
+from cyborg.common import policy
+from cyborg import objects
+from cyborg.api.controllers.v1 import utils as api_utils
+from cyborg.common import exception
+
+
class Accelerator(base.APIBase):
    """API representation of a accelerator.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    a accelerator.
    """

    uuid = types.uuid
    """The UUID of the accelerator"""

    name = wtypes.text
    """The name of the accelerator"""

    description = wtypes.text
    """The description of the accelerator"""

    project_id = types.uuid
    """The project UUID of the accelerator"""

    user_id = types.uuid
    """The user UUID of the accelerator"""

    device_type = wtypes.text
    """The device type of the accelerator"""

    acc_type = wtypes.text
    """The type of the accelerator"""

    acc_capability = wtypes.text
    """The capability of the accelerator"""

    vendor_id = wtypes.text
    """The vendor id of the accelerator"""

    product_id = wtypes.text
    """The product id of the accelerator"""

    remotable = wtypes.IntegerType()
    """Whether the accelerator is remotable"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link"""

    def __init__(self, **kwargs):
        """Expose every objects.Accelerator field; absent ones are Unset."""
        super(Accelerator, self).__init__(**kwargs)
        self.fields = []
        for field in objects.Accelerator.fields:
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @classmethod
    def convert_with_links(cls, obj_acc):
        """Build an API representation with self/bookmark links."""
        api_acc = cls(**obj_acc.as_dict())
        url = pecan.request.public_url
        api_acc.links = [
            link.Link.make_link('self', url, 'accelerators', api_acc.uuid),
            link.Link.make_link('bookmark', url, 'accelerators', api_acc.uuid,
                                bookmark=True)
        ]
        return api_acc
+
+
class AcceleratorCollection(base.APIBase):
    """API representation of a collection of accelerators."""

    accelerators = [Accelerator]
    """A list containing accelerator objects"""

    @classmethod
    def convert_with_links(cls, obj_accs):
        """Wrap a list of accelerator objects into a collection."""
        collection = cls()
        collection.accelerators = [
            Accelerator.convert_with_links(acc) for acc in obj_accs]
        return collection
+
+
class AcceleratorPatchType(types.JsonPatchType):
    """JSON-patch validation type for accelerators."""

    _api_base = Accelerator

    @staticmethod
    def internal_attrs():
        """Return attribute paths that may not be modified via PATCH."""
        defaults = types.JsonPatchType.internal_attrs()
        return defaults + ['/project_id', '/user_id', '/device_type',
                           '/acc_type', '/acc_capability', '/vendor_id',
                           '/product_id', '/remotable']
+
+
class AcceleratorsControllerBase(rest.RestController):

    # Per-request cache of the accelerator object; handlers use the
    # ``self._resource or self._get_resource(uuid)`` pattern, so the
    # lookup may already have happened (presumably via the
    # policy.authorize_wsgi decorator -- confirm).
    _resource = None

    def _get_resource(self, uuid):
        """Fetch and cache the accelerator identified by *uuid*."""
        self._resource = objects.Accelerator.get(pecan.request.context, uuid)
        return self._resource
+
+
class AcceleratorsController(AcceleratorsControllerBase):
    """REST controller for Accelerators."""

    @policy.authorize_wsgi("cyborg:accelerator", "create", False)
    @expose.expose(Accelerator, body=types.jsontype,
                   status_code=http_client.CREATED)
    def post(self, acc):
        """Create a new accelerator.

        :param acc: an accelerator within the request body.
        """
        context = pecan.request.context
        obj_acc = objects.Accelerator(context, **acc)
        new_acc = pecan.request.conductor_api.accelerator_create(
            context, obj_acc)
        # Set the HTTP Location Header
        pecan.response.location = link.build_url('accelerators', new_acc.uuid)
        return Accelerator.convert_with_links(new_acc)

    @policy.authorize_wsgi("cyborg:accelerator", "get")
    @expose.expose(Accelerator, types.uuid)
    def get_one(self, uuid):
        """Retrieve information about the given accelerator.

        :param uuid: UUID of an accelerator.
        """
        # _resource may already be cached by the controller base.
        obj_acc = self._resource or self._get_resource(uuid)
        return Accelerator.convert_with_links(obj_acc)

    @expose.expose(AcceleratorCollection, int, types.uuid, wtypes.text,
                   wtypes.text, types.boolean)
    def get_all(self, limit=None, marker=None, sort_key='id', sort_dir='asc',
                all_tenants=None):
        """Retrieve a list of accelerators.

        :param limit: Optional, to determinate the maximum number of
                      accelerators to return.
        :param marker: Optional, to display a list of accelerators after this
                       marker.
        :param sort_key: Optional, to sort the returned accelerators list by
                         this specified key value.
        :param sort_dir: Optional, to return a list of accelerators with this
                         sort direction.
        :param all_tenants: Optional, allows administrators to see the
                            accelerators owned by all tenants, otherwise only
                            the accelerators associated with the calling
                            tenant are included in the response.
        """
        context = pecan.request.context
        # Only administrators may list across all tenants.
        project_only = True
        if context.is_admin and all_tenants:
            project_only = False

        marker_obj = None
        if marker:
            marker_obj = objects.Accelerator.get(context, marker)

        obj_accs = objects.Accelerator.list(context, limit, marker_obj,
                                            sort_key, sort_dir, project_only)
        return AcceleratorCollection.convert_with_links(obj_accs)

    @policy.authorize_wsgi("cyborg:accelerator", "update")
    @expose.expose(Accelerator, types.uuid, body=[AcceleratorPatchType])
    def patch(self, uuid, patch):
        """Update an accelerator.

        :param uuid: UUID of an accelerator.
        :param patch: a json PATCH document to apply to this accelerator.
        """
        obj_acc = self._resource or self._get_resource(uuid)
        try:
            api_acc = Accelerator(
                **api_utils.apply_jsonpatch(obj_acc.as_dict(), patch))
        except api_utils.JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch, reason=e)

        # Update only the fields that have changed
        for field in objects.Accelerator.fields:
            try:
                patch_val = getattr(api_acc, field)
            except AttributeError:
                # Ignore fields that aren't exposed in the API
                continue
            if patch_val == wtypes.Unset:
                # Unset in the API representation maps to None internally.
                patch_val = None
            if obj_acc[field] != patch_val:
                obj_acc[field] = patch_val

        context = pecan.request.context
        new_acc = pecan.request.conductor_api.accelerator_update(context,
                                                                 obj_acc)
        return Accelerator.convert_with_links(new_acc)

    @policy.authorize_wsgi("cyborg:accelerator", "delete")
    @expose.expose(None, types.uuid, status_code=http_client.NO_CONTENT)
    def delete(self, uuid):
        """Delete an accelerator.

        :param uuid: UUID of an accelerator.
        """
        obj_acc = self._resource or self._get_resource(uuid)
        context = pecan.request.context
        pecan.request.conductor_api.accelerator_delete(context, obj_acc)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/deployables.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/deployables.py
new file mode 100644
index 0000000..8a4a12f
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/deployables.py
@@ -0,0 +1,210 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+from pecan import rest
+from six.moves import http_client
+import wsme
+from wsme import types as wtypes
+
+from cyborg.api.controllers import base
+from cyborg.api.controllers import link
+from cyborg.api.controllers.v1 import types
+from cyborg.api.controllers.v1 import utils as api_utils
+from cyborg.api import expose
+from cyborg.common import exception
+from cyborg.common import policy
+from cyborg import objects
+
+
class Deployable(base.APIBase):
    """API representation of a deployable.
    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    a deployable.
    """

    uuid = types.uuid
    """The UUID of the deployable"""

    name = wtypes.text
    """The name of the deployable"""

    parent_uuid = types.uuid
    """The parent UUID of the deployable"""

    root_uuid = types.uuid
    """The root UUID of the deployable"""

    pcie_address = wtypes.text
    """The pcie address of the deployable"""

    host = wtypes.text
    """The host on which the deployable is located"""

    board = wtypes.text
    """The board of the deployable"""

    vendor = wtypes.text
    """The vendor of the deployable"""

    version = wtypes.text
    """The version of the deployable"""

    type = wtypes.text
    """The type of the deployable"""

    assignable = types.boolean
    """Whether the deployable is assignable"""

    instance_uuid = types.uuid
    """The UUID of the instance which deployable is assigned to"""

    availability = wtypes.text
    """The availability of the deployable"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link"""

    def __init__(self, **kwargs):
        """Expose every objects.Deployable field; absent ones are Unset."""
        super(Deployable, self).__init__(**kwargs)
        self.fields = []
        for field in objects.Deployable.fields:
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @classmethod
    def convert_with_links(cls, obj_dep):
        """Build an API representation with self/bookmark links."""
        api_dep = cls(**obj_dep.as_dict())
        url = pecan.request.public_url
        api_dep.links = [
            link.Link.make_link('self', url, 'deployables', api_dep.uuid),
            link.Link.make_link('bookmark', url, 'deployables', api_dep.uuid,
                                bookmark=True)
        ]
        return api_dep
+
+
class DeployableCollection(base.APIBase):
    """API representation of a collection of deployables."""

    deployables = [Deployable]
    """A list containing deployable objects"""

    @classmethod
    def convert_with_links(cls, obj_deps):
        """Wrap a list of deployable objects into a collection."""
        collection = cls()
        collection.deployables = [
            Deployable.convert_with_links(dep) for dep in obj_deps]
        return collection
+
+
class DeployablePatchType(types.JsonPatchType):
    """JSON-patch validation type for deployables."""

    _api_base = Deployable

    @staticmethod
    def internal_attrs():
        """Return attribute paths that may not be modified via PATCH."""
        defaults = types.JsonPatchType.internal_attrs()
        return defaults + ['/pcie_address', '/host', '/type']
+
+
class DeployablesController(rest.RestController):
    """REST controller for Deployables."""

    @policy.authorize_wsgi("cyborg:deployable", "create", False)
    @expose.expose(Deployable, body=types.jsontype,
                   status_code=http_client.CREATED)
    def post(self, dep):
        """Create a new deployable.
        :param dep: a deployable within the request body.
        """
        context = pecan.request.context
        obj_dep = objects.Deployable(context, **dep)
        new_dep = pecan.request.conductor_api.deployable_create(context,
                                                                obj_dep)
        # Set the HTTP Location Header
        pecan.response.location = link.build_url('deployables', new_dep.uuid)
        return Deployable.convert_with_links(new_dep)

    @policy.authorize_wsgi("cyborg:deployable", "get_one")
    @expose.expose(Deployable, types.uuid)
    def get_one(self, uuid):
        """Retrieve information about the given deployable.
        :param uuid: UUID of a deployable.
        """

        obj_dep = objects.Deployable.get(pecan.request.context, uuid)
        return Deployable.convert_with_links(obj_dep)

    @policy.authorize_wsgi("cyborg:deployable", "get_all")
    @expose.expose(DeployableCollection, types.uuid, wtypes.text,
                   wtypes.text, types.boolean)
    def get_all(self, root_uuid=None, host=None, type=None, assignable=None):
        """Retrieve a list of deployables.

        Each keyword is an optional exact-match filter; 'type' shadows the
        builtin but is part of the public query interface.
        """
        filters = {}
        if root_uuid:
            filters["root_uuid"] = root_uuid
        if host:
            filters["host"] = host
        if type:
            filters["type"] = type
        if assignable:
            filters["assignable"] = assignable
        obj_deps = objects.Deployable.get_by_filter(pecan.request.context,
                                                    filters=filters)
        return DeployableCollection.convert_with_links(obj_deps)

    @policy.authorize_wsgi("cyborg:deployable", "update")
    @expose.expose(Deployable, types.uuid, body=[DeployablePatchType])
    def patch(self, uuid, patch):
        """Update a deployable.
        :param uuid: UUID of a deployable.
        :param patch: a json PATCH document to apply to this deployable.
        """
        context = pecan.request.context
        obj_dep = objects.Deployable.get(context, uuid)

        try:
            api_dep = Deployable(
                **api_utils.apply_jsonpatch(obj_dep.as_dict(), patch))
        except api_utils.JSONPATCH_EXCEPTIONS as e:
            raise exception.PatchError(patch=patch, reason=e)

        # Update only the fields that have changed
        for field in objects.Deployable.fields:
            try:
                patch_val = getattr(api_dep, field)
            except AttributeError:
                # Ignore fields that aren't exposed in the API
                continue
            if patch_val == wtypes.Unset:
                # Unset in the API representation maps to None internally.
                patch_val = None
            if obj_dep[field] != patch_val:
                obj_dep[field] = patch_val

        new_dep = pecan.request.conductor_api.deployable_update(context,
                                                                obj_dep)
        return Deployable.convert_with_links(new_dep)

    @policy.authorize_wsgi("cyborg:deployable", "delete")
    @expose.expose(None, types.uuid, status_code=http_client.NO_CONTENT)
    def delete(self, uuid):
        """Delete a deployable.
        :param uuid: UUID of a deployable.
        """
        context = pecan.request.context
        obj_dep = objects.Deployable.get(context, uuid)
        pecan.request.conductor_api.deployable_delete(context, obj_dep)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/ports.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/ports.py
new file mode 100644
index 0000000..7d6c1dd
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/ports.py
@@ -0,0 +1,269 @@
+# Copyright 2018 Lenovo Research Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+from pecan import rest
+from six.moves import http_client
+import wsme
+from wsme import types as wtypes
+
+from cyborg.api.controllers import base
+from cyborg.api.controllers import link
+from cyborg.api.controllers.v1 import types
+from cyborg.api import expose
+from pecan import expose as pexpose
+from cyborg.common import policy
+from cyborg import objects
+from cyborg.api.controllers.v1 import utils as api_utils
+from cyborg.common import exception
+
+from oslo_log import log as logging
+
+LOG = logging.getLogger(__name__)
+
+class Port(base.APIBase):
+    """API representation of a port.
+
+    This class enforces type checking and value constraints, and converts
+    between the internal object model and the API representation of
+    a port.
+    """
+
+    uuid = types.uuid
+    # presumably the id of the host/compute node owning this port — TODO confirm
+    computer_id = types.uuid
+    phy_port_name = wtypes.text
+    pci_slot = wtypes.text
+    product_id = wtypes.text
+    vendor_id = wtypes.text
+    # integer used/unused flag (unbind resets it to 0)
+    is_used = wtypes.IntegerType()
+    # The four fields below are written by bind and cleared by unbind.
+    accelerator_id = types.uuid
+    bind_instance_id = types.uuid
+    bind_port_id = types.uuid
+    device_type = wtypes.text
+
+    links = wsme.wsattr([link.Link], readonly=True)
+    """A list containing a self link"""
+
+    def __init__(self, **kwargs):
+        # Mirror every field declared on objects.Port; values absent from
+        # kwargs are left as wtypes.Unset so PATCH can distinguish
+        # "not supplied" from an explicit null.
+        self.fields = []
+        for field in objects.Port.fields:
+            self.fields.append(field)
+            setattr(self, field, kwargs.get(field, wtypes.Unset))
+
+    @classmethod
+    def convert_with_links(cls, rpc_acc):
+        """Build an API Port from an RPC/DB object and attach self links."""
+        port = Port(**rpc_acc.as_dict())
+        url = pecan.request.public_url
+        port.links = [
+            link.Link.make_link('self', url, 'ports',
+                                port.uuid),
+            link.Link.make_link('bookmark', url, 'ports',
+                                port.uuid, bookmark=True)
+        ]
+
+        return port
+
+
+
+class PortCollection(base.APIBase):
+    """API representation of a collection of ports."""
+
+    ports = [Port]
+    """A list containing port objects"""
+
+    @classmethod
+    def convert_with_links(cls, rpc_ports):
+        """Wrap each RPC port into an API Port and return the collection."""
+        collection = cls()
+        collection.ports = [Port.convert_with_links(obj_port)
+                            for obj_port in rpc_ports]
+        return collection
+
+
+class PortPatchType(types.JsonPatchType):
+    """JSON-patch type for Port; declares fields PATCH may not modify."""
+
+    _api_base = Port
+
+    @staticmethod
+    def internal_attrs():
+        """Extend the default internal attrs with hardware-derived fields."""
+        defaults = types.JsonPatchType.internal_attrs()
+        return defaults + ['/computer_id', '/phy_port_name', '/pci_slot',
+                           '/vendor_id', '/product_id']
+
+
+class PortsControllerBase(rest.RestController):
+ _resource = None
+ def _get_resource(self, uuid):
+ self._resource = objects.Port.get(pecan.request.context, uuid)
+ return self._resource
+
+
+class BindPortController(PortsControllerBase):
+ # url path: /v1/ports/bind/{uuid}
+
+ @expose.expose(Port, body=types.jsontype)
+ def put(self, uuid, patch):
+ """bind a existing port to a logical neutron port.
+ : param uuid: UUID of a port.
+ : param patch: a json type to apply to this port.
+ """
+ context = pecan.request.context
+ obj_port = self._resource or self._get_resource(uuid)
+ # object with user modified properties.
+ mod_port = objects.Port(context, **patch)
+
+ # update fields used in bind.
+ obj_port["accelerator_id"] = mod_port["accelerator_id"]
+ obj_port["bind_instance_id"] = mod_port["bind_instance_id"]
+ obj_port["bind_port_id"] = mod_port["bind_port_id"]
+ obj_port["is_used"] = mod_port["is_used"]
+ obj_port["device_type"] = mod_port["device_type"]
+
+ LOG.debug(obj_port)
+ new_port = pecan.request.conductor_api.port_update(context, obj_port)
+ return Port.convert_with_links(new_port)
+
+class UnBindPortController(PortsControllerBase):
+ # url path: /v1/ports/bind/{uuid}
+
+ @expose.expose(Port, body=types.jsontype)
+ def put(self, uuid):
+ """unbind a existing port, set some areas to null in DB.
+ : param uuid: UUID of a port.
+ : param patch: a json type to apply to this port.
+ """
+ context = pecan.request.context
+ obj_port = self._resource or self._get_resource(uuid)
+
+ # update fields used in unbind.
+ obj_port["accelerator_id"] = None
+ obj_port["bind_instance_id"] = None
+ obj_port["bind_port_id"] = None
+ obj_port["is_used"] = 0
+ obj_port["device_type"] = None
+
+ new_port = pecan.request.conductor_api.port_update(context, obj_port)
+ return Port.convert_with_links(new_port)
+
+
+class PortsController(PortsControllerBase):
+ """REST controller for Ports.
+ url path: /v2.0/ports/
+ """
+ bind = BindPortController()
+ unbind = UnBindPortController()
+
+ @policy.authorize_wsgi("cyborg:port", "create", False)
+ @expose.expose(Port, body=types.jsontype,
+ status_code=http_client.CREATED)
+ def post(self, port):
+ """Create a new port.
+
+ :param port: an port within the request body.
+ """
+ context = pecan.request.context
+ rpc_port = objects.Port(context, **port)
+ new_port = pecan.request.conductor_api.port_create(
+ context, rpc_port)
+ # Set the HTTP Location Header
+ pecan.response.location = link.build_url('ports',
+ new_port.uuid)
+ return Port.convert_with_links(new_port)
+
+ #@policy.authorize_wsgi("cyborg:port", "get")
+ @expose.expose(Port, types.uuid)
+ def get_one(self, uuid):
+ """Retrieve information about the given uuid port.
+ : param uuid: UUID of a port.
+ """
+ rpc_port = self._get_resource(uuid)
+ if rpc_port == None:
+ return pecan.abort(404, detail='The uuid Not Found.')
+ else:
+ return Port.convert_with_links(rpc_port)
+
+ @expose.expose(PortCollection, int, types.uuid, wtypes.text,
+ wtypes.text, types.boolean)
+ def get_all(self, limit = None, marker = None, sort_key='id',
+ sort_dir='asc'):
+ """Retrieve a list of ports.
+ : param limit: Optional, to determine the maximum number of
+ ports to return.
+ : param marker: Optional, to display a list of ports after
+ this marker.
+ : param sort_dir: Optional, to return a list of ports with this
+ sort direction.
+ : param all_tenants: Optional, allows administrators to see the
+ ports owned by all tenants, otherwise only the ports
+ associated with the calling tenant are included in the response."""
+
+ context = pecan.request.context
+ marker_obj = None;
+ if marker:
+ marker_obj = objects.Port.get(context, marker)
+
+ rpc_ports = objects.Port.list(
+ context, limit, marker_obj, sort_key, sort_dir)
+
+ return PortCollection.convert_with_links(rpc_ports)
+
+ #@policy.authorize_wsgi("cyborg:port", "update")
+ @expose.expose(Port, types.uuid, body=[PortPatchType])
+ def put(self, uuid, patch):
+ """Update an port's property.
+ : param uuid: UUID of a port.
+ : param patch: a json PATCH document to apply to this port.
+ """
+ obj_port = self._resource or self._get_resource(uuid)
+ try:
+ api_port = Port(**api_utils.apply_jsonpatch(obj_port.as_dict(), patch))
+ except api_utils.JSONPATCH_EXCEPTIONS as e:
+ raise exception.PatchError(patch=patch, reason=e)
+
+ #update only the fields that have changed.
+ for field in objects.Port.fields:
+ try:
+ patch_val = getattr(api_port, field)
+ except AttributeError:
+ # Ignore fields that aren't exposed in the API
+ continue
+
+ if patch_val == wtypes.Unset:
+ patch_val = None
+ if obj_port[field] != patch_val:
+ obj_port[field] = patch_val
+
+ context = pecan.request.context
+ new_port = pecan.request.conductor_api.port_update(context, obj_port)
+ return Port.convert_with_links(new_port)
+
+
+ #@policy.authorize_wsgi("cyborg:port", "delete")
+ @expose.expose(None, types.uuid, status_code=http_client.NO_CONTENT)
+ def delete(self, uuid):
+ """Delete a port.
+ :param uuid: UUID of the port."""
+
+ rpc_port = self._resource or self._get_resource(uuid)
+ if rpc_port == None:
+ status_code = http_client.NOT_FOUND
+ context = pecan.request.context
+ pecan.request.conductor_api.port_delete(context, rpc_port)
+
+
+
+
+
+
+
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/types.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/types.py
new file mode 100644
index 0000000..61ce387
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/types.py
@@ -0,0 +1,161 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import json
+
+from oslo_utils import uuidutils
+from wsme import types as wtypes
+import wsme
+
+import inspect
+from oslo_utils import strutils
+from cyborg.common.i18n import _
+from cyborg.common import exception
+
+
+class UUIDType(wtypes.UserType):
+    """A simple UUID type."""
+
+    basetype = wtypes.text
+    name = 'uuid'
+
+    @staticmethod
+    def validate(value):
+        """Return *value* if UUID-like, else raise InvalidUUID."""
+        if not uuidutils.is_uuid_like(value):
+            raise exception.InvalidUUID(uuid=value)
+        return value
+
+    @staticmethod
+    def frombasetype(value):
+        """Convert from the wsme base type; None passes through as-is."""
+        if value is None:
+            return None
+        return UUIDType.validate(value)
+
+
+class JsonType(wtypes.UserType):
+    """A simple JSON type."""
+
+    basetype = wtypes.text
+    name = 'json'
+
+    @staticmethod
+    def validate(value):
+        """Return *value* if it is JSON-serializable, else InvalidJsonType."""
+        try:
+            # Serialization is used purely as a validity probe; the
+            # serialized form is discarded.
+            json.dumps(value)
+        except TypeError:
+            raise exception.InvalidJsonType(value=value)
+        else:
+            return value
+
+    @staticmethod
+    def frombasetype(value):
+        """Convert from the wsme base type (None is validated too)."""
+        return JsonType.validate(value)
+
+
+class BooleanType(wtypes.UserType):
+    """A simple boolean type."""
+
+    basetype = wtypes.text
+    name = 'boolean'
+
+    @staticmethod
+    def validate(value):
+        """Parse a boolean-like string strictly ('t', 'yes', '1', ...)."""
+        try:
+            return strutils.bool_from_string(value, strict=True)
+        except ValueError as e:
+            # raise Invalid to return 400 (BadRequest) in the API
+            raise exception.Invalid(e)
+
+    @staticmethod
+    def frombasetype(value):
+        """Convert from the wsme base type; None passes through as-is."""
+        if value is None:
+            return None
+        return BooleanType.validate(value)
+
+
+# Singleton instances used by the controllers when declaring signatures.
+uuid = UUIDType()
+jsontype = JsonType()
+boolean = BooleanType()
+
+
+class JsonPatchType(wtypes.Base):
+ """A complex type that represents a single json-patch operation."""
+
+ path = wtypes.wsattr(wtypes.StringType(pattern='^(/[\w-]+)+$'),
+ mandatory=True)
+ op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'),
+ mandatory=True)
+ value = wtypes.wsattr(jsontype, default=wtypes.Unset)
+
+ # The class of the objects being patched. Override this in subclasses.
+ # Should probably be a subclass of cyborg.api.controllers.base.APIBase.
+ _api_base = None
+
+ # Attributes that are not required for construction, but which may not be
+ # removed if set. Override in subclasses if needed.
+ _extra_non_removable_attrs = set()
+
+ # Set of non-removable attributes, calculated lazily.
+ _non_removable_attrs = None
+
+ @staticmethod
+ def internal_attrs():
+ """Returns a list of internal attributes.
+
+ Internal attributes can't be added, replaced or removed. This
+ method may be overwritten by derived class.
+
+ """
+ return ['/created_at', '/id', '/links', '/updated_at', '/uuid']
+
+ @classmethod
+ def non_removable_attrs(cls):
+ """Returns a set of names of attributes that may not be removed.
+
+ Attributes whose 'mandatory' property is True are automatically added
+ to this set. To add additional attributes to the set, override the
+ field _extra_non_removable_attrs in subclasses, with a set of the form
+ {'/foo', '/bar'}.
+ """
+ if cls._non_removable_attrs is None:
+ cls._non_removable_attrs = cls._extra_non_removable_attrs.copy()
+ if cls._api_base:
+ fields = inspect.getmembers(cls._api_base,
+ lambda a: not inspect.isroutine(a))
+ for name, field in fields:
+ if getattr(field, 'mandatory', False):
+ cls._non_removable_attrs.add('/%s' % name)
+ return cls._non_removable_attrs
+
+ @staticmethod
+ def validate(patch):
+ _path = '/' + patch.path.split('/')[1]
+ if _path in patch.internal_attrs():
+ msg = _("'%s' is an internal attribute and can not be updated")
+ raise wsme.exc.ClientSideError(msg % patch.path)
+
+ if patch.path in patch.non_removable_attrs() and patch.op == 'remove':
+ msg = _("'%s' is a mandatory attribute and can not be removed")
+ raise wsme.exc.ClientSideError(msg % patch.path)
+
+ if patch.op != 'remove':
+ if patch.value is wsme.Unset:
+ msg = _("'add' and 'replace' operations need a value")
+ raise wsme.exc.ClientSideError(msg)
+
+ ret = {'path': patch.path, 'op': patch.op}
+ if patch.value is not wsme.Unset:
+ ret['value'] = patch.value
+ return ret
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/utils.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/utils.py
new file mode 100644
index 0000000..6c88d3e
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/controllers/v1/utils.py
@@ -0,0 +1,35 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import jsonpatch
+import wsme
+
+
+from cyborg.common.i18n import _
+
+
+JSONPATCH_EXCEPTIONS = (jsonpatch.JsonPatchException,
+ jsonpatch.JsonPointerException,
+ KeyError)
+
+
+def apply_jsonpatch(doc, patch):
+ for p in patch:
+ if p['op'] == 'add' and p['path'].count('/') == 1:
+ if p['path'].lstrip('/') not in doc:
+ msg = _('Adding a new attribute (%s) to the root of '
+ ' the resource is not allowed')
+ raise wsme.exc.ClientSideError(msg % p['path'])
+ return jsonpatch.apply_patch(doc, jsonpatch.JsonPatch(patch)) \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/expose.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/expose.py
new file mode 100644
index 0000000..bcc92f4
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/expose.py
@@ -0,0 +1,40 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import wsmeext.pecan as wsme_pecan
+from pecan import expose as p_expose
+
+# Defaults shared by content_expose()/when() below. NOTE(review): the
+# name is a historical misspelling of "kwargs", kept since both helpers
+# reference it.
+default_kargs = {
+    'template': 'json',
+    'content_type': 'application/json'
+}
+
+def expose(*args, **kwargs):
+ """Ensure that only JSON, and not XML, is supported."""
+ if 'rest_content_types' not in kwargs:
+ kwargs['rest_content_types'] = ('json',)
+ return wsme_pecan.wsexpose(*args, **kwargs)
+
+def content_expose(*args, **kwargs):
+ """Helper function so we don't have to specify json for everything."""
+ kwargs.setdefault('template', default_kargs['template'])
+ kwargs.setdefault('content_type', default_kargs['content_type'])
+ return p_expose(*args, **kwargs)
+
+def when(index, *args, **kwargs):
+    """Register a handler via ``index.when()`` with JSON defaults.
+
+    Wraps pecan's ``when`` registration so callers don't have to specify
+    the json template and content type every time.
+    """
+    kwargs.setdefault('template', default_kargs['template'])
+    kwargs.setdefault('content_type', default_kargs['content_type'])
+    return index.when(*args, **kwargs)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/hooks.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/hooks.py
new file mode 100644
index 0000000..6793982
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/hooks.py
@@ -0,0 +1,112 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+from oslo_context import context
+from pecan import hooks
+
+from cyborg.common import policy
+from cyborg.conductor import rpcapi
+
+
+class ConfigHook(hooks.PecanHook):
+    """Attach the config object to the request so controllers can get to it."""
+
+    def before(self, state):
+        # Runs before each request; exposes oslo.config's CONF as request.cfg.
+        state.request.cfg = cfg.CONF
+
+
+class PublicUrlHook(hooks.PecanHook):
+    """Attach the right public_url to the request.
+
+    Attach the right public_url to the request so resources can create
+    links even when the API service is behind a proxy or SSL terminator.
+    """
+
+    def before(self, state):
+        # Prefer the configured endpoint; fall back to the request host.
+        state.request.public_url = (
+            cfg.CONF.api.public_endpoint or state.request.host_url)
+
+
+class ConductorAPIHook(hooks.PecanHook):
+    """Attach the conductor_api object to the request."""
+
+    def __init__(self):
+        # A single RPC client instance, created once and shared by all
+        # requests served by this hook.
+        self.conductor_api = rpcapi.ConductorAPI()
+
+    def before(self, state):
+        state.request.conductor_api = self.conductor_api
+
+
+class ContextHook(hooks.PecanHook):
+ """Configures a request context and attaches it to the request.
+
+ The following HTTP request headers are used:
+
+ X-User-Id or X-User:
+ Used for context.user.
+
+ X-Tenant-Id or X-Tenant:
+ Used for context.tenant.
+
+ X-Auth-Token:
+ Used for context.auth_token.
+
+ X-Roles:
+ Used for setting context.is_admin flag to either True or False.
+ The flag is set to True, if X-Roles contains either an administrator
+ or admin substring. Otherwise it is set to False.
+
+ """
+
+ def __init__(self, public_api_routes):
+ self.public_api_routes = public_api_routes
+ super(ContextHook, self).__init__()
+
+ def before(self, state):
+ headers = state.request.headers
+ '''
+ creds = {
+ 'user_name': headers.get('X-User-Name'),
+ 'user': headers.get('X-User-Id'),
+ 'project_name': headers.get('X-Project-Name'),
+ 'tenant': headers.get('X-Project-Id'),
+ 'domain': headers.get('X-User-Domain-Id'),
+ 'domain_name': headers.get('X-User-Domain-Name'),
+ 'auth_token': headers.get('X-Auth-Token'),
+ 'roles': headers.get('X-Roles', '').split(','),
+ }'''
+
+ creds = {
+ 'user': headers.get('X-User-Id'),
+ 'tenant': headers.get('X-Project-Id'),
+ 'domain': headers.get('X-User-Domain-Id',''),
+ 'auth_token': headers.get('X-Auth-Token'),
+ 'roles': headers.get('X-Roles', '').split(','),
+ }
+
+
+ is_admin = policy.authorize('is_admin', creds, creds)
+ state.request.context = context.RequestContext(
+ is_admin=is_admin, **creds)
+
+ def after(self, state):
+ if state.request.context == {}:
+ # An incorrect url path will not create RequestContext
+ return
+ # RequestContext will generate a request_id if no one
+ # passing outside, so it always contain a request_id.
+ request_id = state.request.context.request_id
+ state.response.headers['Openstack-Request-Id'] = request_id
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/__init__.py
new file mode 100644
index 0000000..95cc740
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/__init__.py
@@ -0,0 +1,24 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from cyborg.api.middleware import auth_token
+from cyborg.api.middleware import parsable_error
+
+
+ParsableErrorMiddleware = parsable_error.ParsableErrorMiddleware
+AuthTokenMiddleware = auth_token.AuthTokenMiddleware
+
+__all__ = ('ParsableErrorMiddleware',
+ 'AuthTokenMiddleware')
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/auth_token.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/auth_token.py
new file mode 100644
index 0000000..95b5323
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/auth_token.py
@@ -0,0 +1,64 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import re
+
+from keystonemiddleware import auth_token
+from oslo_log import log
+
+from cyborg.common import exception
+from cyborg.common.i18n import _
+from cyborg.common import utils
+
+
+LOG = log.getLogger(__name__)
+
+
+class AuthTokenMiddleware(auth_token.AuthProtocol):
+ """A wrapper on Keystone auth_token middleware.
+
+ Does not perform verification of authentication tokens
+ for public routes in the API.
+
+ """
+ def __init__(self, app, conf, public_api_routes=None):
+ public_api_routes = public_api_routes or []
+ self.app = app
+ route_pattern_tpl = '%s(\.json)?$'
+
+ try:
+ self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl)
+ for route_tpl in public_api_routes]
+ except re.error as e:
+ msg = _('Cannot compile public API routes: %s') % e
+
+ LOG.error(msg)
+ raise exception.ConfigInvalid(error_msg=msg)
+
+ super(AuthTokenMiddleware, self).__init__(app, conf)
+
+ def __call__(self, env, start_response):
+ path = utils.safe_rstrip(env.get('PATH_INFO'), '/')
+
+ # The information whether the API call is being performed against the
+ # public API is required for some other components. Saving it to the
+ # WSGI environment is reasonable thereby.
+ env['is_public_api'] = any(map(lambda pattern: re.match(pattern, path),
+ self.public_api_routes))
+
+ if env['is_public_api']:
+ return self.app(env, start_response)
+
+ return super(AuthTokenMiddleware, self).__call__(env, start_response)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/parsable_error.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/parsable_error.py
new file mode 100644
index 0000000..ba80c22
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/api/middleware/parsable_error.py
@@ -0,0 +1,72 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Middleware to replace the plain text message body of an error
+response with one formatted so the client can parse it.
+
+Based on pecan.middleware.errordocument
+"""
+
+import json
+
+import six
+
+
+class ParsableErrorMiddleware(object):
+    """Replace error body with something the client can parse."""
+    def __init__(self, app):
+        self.app = app
+
+    def __call__(self, environ, start_response):
+        # Request for this state, modified by replace_start_response()
+        # and used when an error is being reported.
+        state = {}
+
+        def replacement_start_response(status, headers, exc_info=None):
+            """Overrides the default response to make errors parsable."""
+            try:
+                status_code = int(status.split(' ')[0])
+                state['status_code'] = status_code
+            except (ValueError, TypeError):  # pragma: nocover
+                raise Exception(
+                    'ParsableErrorMiddleware received an invalid '
+                    'status %s' % status)
+
+            if (state['status_code'] // 100) not in (2, 3):
+                # Remove some headers so we can replace them later
+                # when we have the full error message and can
+                # compute the length.
+                headers = [
+                    (h, v) for (h, v) in headers
+                    if h not in ('Content-Length', 'Content-Type')]
+
+            # Save the headers in case we need to modify them.
+            state['headers'] = headers
+            return start_response(status, headers, exc_info)
+
+        app_iter = self.app(environ, replacement_start_response)
+
+        # NOTE(review): if the wrapped app raises before calling
+        # start_response, state['status_code'] is never set and the lookup
+        # below raises KeyError — confirm against upstream error handling.
+        if (state['status_code'] // 100) not in (2, 3):
+            # Re-wrap the error body as a JSON document and recompute the
+            # Content-Length header that was stripped above.
+            if six.PY3:
+                app_iter = [i.decode('utf-8') for i in app_iter]
+            body = [json.dumps({'error_message': '\n'.join(app_iter)})]
+            if six.PY3:
+                body = [i.encode('utf-8') for i in body]
+            state['headers'].append(('Content-Type', 'application/json'))
+            state['headers'].append(('Content-Length', str(len(body[0]))))
+        else:
+            body = app_iter
+        return body
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/__init__.py
new file mode 100644
index 0000000..b90e7b3
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/__init__.py
@@ -0,0 +1,19 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import oslo_i18n as i18n
+
+
+# Install the _() translation function into builtins for the whole package.
+i18n.install('cyborg')
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/agent.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/agent.py
new file mode 100644
index 0000000..8663bd1
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/agent.py
@@ -0,0 +1,37 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""The Cyborg Agent Service."""
+
+import sys
+
+from oslo_config import cfg
+from oslo_service import service
+
+from cyborg.common import constants
+from cyborg.common import service as cyborg_service
+
+
+CONF = cfg.CONF
+
+
+def main():
+ # Parse config file and command line options, then start logging
+ cyborg_service.prepare_service(sys.argv)
+
+ mgr = cyborg_service.RPCService('cyborg.agent.manager',
+ 'AgentManager',
+ constants.AGENT_TOPIC)
+
+ launcher = service.launch(CONF, mgr)
+ launcher.wait()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/api.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/api.py
new file mode 100644
index 0000000..7199e7b
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/api.py
@@ -0,0 +1,36 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""The Cyborg Service API."""
+
+import sys
+
+from oslo_config import cfg
+
+from cyborg.common import service as cyborg_service
+
+
+CONF = cfg.CONF
+
+
+def main():
+ # Parse config file and command line options, then start logging
+ cyborg_service.prepare_service(sys.argv)
+
+ # Build and start the WSGI app
+ launcher = cyborg_service.process_launcher()
+ server = cyborg_service.WSGIService('cyborg_api', CONF.api.enable_ssl_api)
+ launcher.launch_service(server, workers=server.workers)
+ launcher.wait()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/conductor.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/conductor.py
new file mode 100644
index 0000000..eb04dc2
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/conductor.py
@@ -0,0 +1,39 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""The Cyborg Conductor Service."""
+
+import sys
+
+from oslo_config import cfg
+from oslo_service import service
+
+from cyborg.common import constants
+from cyborg.common import service as cyborg_service
+
+
+CONF = cfg.CONF
+
+
+def main():
+ # Parse config file and command line options, then start logging
+ cyborg_service.prepare_service(sys.argv)
+
+ mgr = cyborg_service.RPCService('cyborg.conductor.manager',
+ 'ConductorManager',
+ constants.CONDUCTOR_TOPIC)
+
+ launcher = service.launch(CONF, mgr)
+ launcher.wait()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/dbsync.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/dbsync.py
new file mode 100644
index 0000000..08facbf
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/cmd/dbsync.py
@@ -0,0 +1,91 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Run storage database migration.
+"""
+
+import sys
+
+from oslo_config import cfg
+
+from cyborg.common.i18n import _
+from cyborg.common import service
+from cyborg.conf import CONF
+from cyborg.db import migration
+
+
class DBCommand(object):
    """Dispatcher mapping dbsync sub-commands to DB migration calls.

    Each method takes no parameters; arguments are read from the
    already-parsed ``CONF.command`` namespace.
    """

    def upgrade(self):
        # Upgrade the schema to --revision (or the latest head when omitted).
        migration.upgrade(CONF.command.revision)

    def revision(self):
        # Create a new alembic revision file (-m message, --autogenerate).
        migration.revision(CONF.command.message, CONF.command.autogenerate)

    def stamp(self):
        # Mark the database as being at --revision without running migrations.
        migration.stamp(CONF.command.revision)

    def version(self):
        # Print the current database schema version.
        print(migration.version())

    def create_schema(self):
        # Create the full database schema from scratch.
        migration.create_schema()
+
+
def add_command_parsers(subparsers):
    """Register the dbsync sub-commands on *subparsers*.

    Each sub-command is wired to the matching DBCommand method and
    declares its own command-line arguments.
    """
    cmd = DBCommand()

    upgrade = subparsers.add_parser(
        'upgrade',
        help=_("Upgrade the database schema to the latest version. "
               "Optionally, use --revision to specify an alembic revision "
               "string to upgrade to."))
    upgrade.set_defaults(func=cmd.upgrade)
    upgrade.add_argument('--revision', nargs='?')

    revision = subparsers.add_parser(
        'revision',
        help=_("Create a new alembic revision. "
               "Use --message to set the message string."))
    revision.set_defaults(func=cmd.revision)
    revision.add_argument('-m', '--message')
    revision.add_argument('--autogenerate', action='store_true')

    stamp = subparsers.add_parser('stamp')
    stamp.set_defaults(func=cmd.stamp)
    stamp.add_argument('--revision', nargs='?')

    version = subparsers.add_parser(
        'version',
        help=_("Print the current version information and exit."))
    version.set_defaults(func=cmd.version)

    create_schema = subparsers.add_parser(
        'create_schema',
        help=_("Create the database schema."))
    create_schema.set_defaults(func=cmd.create_schema)
+
+
def main():
    """Entry point for the cyborg-dbsync utility."""
    # Expose the sub-commands defined in add_command_parsers as one
    # oslo.config sub-command option.
    command_opt = cfg.SubCommandOpt('command',
                                    title='Command',
                                    help=_('Available commands'),
                                    handler=add_command_parsers)

    CONF.register_cli_opt(command_opt)

    # Parsing selects CONF.command.func; then run the chosen sub-command.
    service.prepare_service(sys.argv)
    CONF.command.func()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/config.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/config.py
new file mode 100644
index 0000000..954e36d
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/config.py
@@ -0,0 +1,29 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+
+from cyborg.common import rpc
+from cyborg import version
+
+
def parse_args(argv, default_config_files=None):
    """Parse configuration files and command-line arguments.

    Sets the RPC control exchange first, then loads configuration and
    finally initializes the messaging layer against the loaded CONF.
    """
    rpc.set_defaults(control_exchange='cyborg')
    release = version.version_info.release_string()
    cfg.CONF(argv[1:],
             project='cyborg',
             version=release,
             default_config_files=default_config_files)
    rpc.init(cfg.CONF)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/constants.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/constants.py
new file mode 100644
index 0000000..c9c7f98
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/constants.py
@@ -0,0 +1,18 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
# Well-known oslo.messaging topics used to address the Cyborg services.
CONDUCTOR_TOPIC = 'cyborg-conductor'
AGENT_TOPIC = 'cyborg-agent'
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/exception.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/exception.py
new file mode 100644
index 0000000..768e0b7
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/exception.py
@@ -0,0 +1,202 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Cyborg base exception handling.
+
+SHOULD include dedicated exception logging.
+
+"""
+
+from oslo_log import log
+import six
+from six.moves import http_client
+
+from cyborg.common.i18n import _
+from cyborg.conf import CONF
+
+
+LOG = log.getLogger(__name__)
+
+
class CyborgException(Exception):
    """Base Cyborg Exception

    To correctly use this class, inherit from it and define
    a '_msg_fmt' property. That message will get printf'd
    with the keyword arguments provided to the constructor.

    If you need to access the message from an exception you should use
    six.text_type(exc)

    """
    # Defaults; subclasses override _msg_fmt and (optionally) code.
    _msg_fmt = _("An unknown exception occurred.")
    code = http_client.INTERNAL_SERVER_ERROR
    headers = {}
    safe = False

    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs

        # Make the HTTP status available to templates that reference
        # %(code)s, unless the caller supplied an explicit 'code'.
        if 'code' not in self.kwargs:
            try:
                self.kwargs['code'] = self.code
            except AttributeError:
                pass

        if not message:
            try:
                message = self._msg_fmt % kwargs
            except Exception:
                # kwargs doesn't match a variable in self._msg_fmt
                # log the issue and the kwargs
                LOG.exception('Exception in string format operation')
                for name, value in kwargs.items():
                    LOG.error("%s: %s" % (name, value))

                # Re-raise in dev/test setups; otherwise fall back to the
                # unformatted template instead of crashing.
                if CONF.fatal_exception_format_errors:
                    raise
                else:
                    # at least get the core self._msg_fmt out if something
                    # happened
                    message = self._msg_fmt

        super(CyborgException, self).__init__(message)

    def __str__(self):
        """Encode to utf-8 then wsme api can consume it as well."""
        # NOTE: `unicode` only exists on Python 2; the PY3 guard keeps this
        # branch from being reached on Python 3.
        if not six.PY3:
            return unicode(self.args[0]).encode('utf-8')

        return self.args[0]

    def __unicode__(self):
        """Return a unicode representation of the exception message."""
        # Python 2 only; never invoked under Python 3.
        return unicode(self.args[0])
+
+
class ConfigInvalid(CyborgException):
    _msg_fmt = _("Invalid configuration file. %(error_msg)s")


class AcceleratorAlreadyExists(CyborgException):
    _msg_fmt = _("Accelerator with uuid %(uuid)s already exists.")


# 400-family client errors.
class Invalid(CyborgException):
    _msg_fmt = _("Invalid parameters.")
    code = http_client.BAD_REQUEST


class InvalidIdentity(Invalid):
    _msg_fmt = _("Expected a uuid/id but received %(identity)s.")


class InvalidUUID(Invalid):
    _msg_fmt = _("Expected a uuid but received %(uuid)s.")


class InvalidJsonType(Invalid):
    _msg_fmt = _("%(value)s is not JSON serializable.")


# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
    _msg_fmt = _("%(err)s")


class PatchError(Invalid):
    _msg_fmt = _("Couldn't apply patch '%(patch)s'. Reason: %(reason)s")


# 403-family errors.
class NotAuthorized(CyborgException):
    _msg_fmt = _("Not authorized.")
    code = http_client.FORBIDDEN


class HTTPForbidden(NotAuthorized):
    _msg_fmt = _("Access was denied to the following resource: %(resource)s")


# 404-family errors.
class NotFound(CyborgException):
    _msg_fmt = _("Resource could not be found.")
    code = http_client.NOT_FOUND


class AcceleratorNotFound(NotFound):
    _msg_fmt = _("Accelerator %(uuid)s could not be found.")


# 409-family errors.
class Conflict(CyborgException):
    _msg_fmt = _('Conflict.')
    code = http_client.CONFLICT


class DuplicateName(Conflict):
    _msg_fmt = _("An accelerator with name %(name)s already exists.")


class PortAlreadyExists(CyborgException):
    _msg_fmt = _("Port with uuid %(uuid)s already exists.")


class PortNotFound(NotFound):
    _msg_fmt = _("Port %(uuid)s could not be found.")
+
+
class PortDuplicateName(Conflict):
    """Raised when creating a port whose name is already taken."""
    # NOTE: message grammar fixed ("An port" -> "A port").
    _msg_fmt = _("A port with name %(name)s already exists.")
+
+
# NOTE: These classes previously assigned ``message``, an attribute the
# base CyborgException never reads -- it formats ``_msg_fmt`` -- so their
# specific texts were silently ignored and callers saw only the generic
# NotFound/Conflict wording. Use ``_msg_fmt`` like every other subclass.
class PlacementEndpointNotFound(NotFound):
    _msg_fmt = _("Placement API endpoint not found")


class PlacementResourceProviderNotFound(NotFound):
    _msg_fmt = _("Placement resource provider not found %(resource_provider)s.")


class PlacementInventoryNotFound(NotFound):
    _msg_fmt = _("Placement inventory not found for resource provider "
                 "%(resource_provider)s, resource class %(resource_class)s.")


class PlacementInventoryUpdateConflict(Conflict):
    _msg_fmt = _("Placement inventory update conflict for resource provider "
                 "%(resource_provider)s, resource class %(resource_class)s.")
+
# -- Deployable table errors -------------------------------------------
# (PEP8 fixes only: two blank lines between top-level classes, comment
# spacing; behavior unchanged.)

class DeployableNotFound(NotFound):
    _msg_fmt = _("Deployable %(uuid)s could not be found.")


class DuplicateDeployableName(Conflict):
    _msg_fmt = _("A deployable with name %(name)s already exists.")


class InvalidDeployableType(CyborgException):
    _msg_fmt = _("Deployable have an invalid type.")


class ObjectActionError(CyborgException):
    _msg_fmt = _('Object action %(action)s failed because: %(reason)s')


# -- Attribute table errors --------------------------------------------

class AttributeNotFound(NotFound):
    _msg_fmt = _("Attribute %(uuid)s could not be found.")


class AttributeInvalid(CyborgException):
    _msg_fmt = _("Attribute is invalid.")


class AttributeAlreadyExists(CyborgException):
    _msg_fmt = _("Attribute with uuid %(uuid)s already exists.")
+
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/i18n.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/i18n.py
new file mode 100644
index 0000000..eb5c313
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/i18n.py
@@ -0,0 +1,22 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import oslo_i18n as i18n
+
+
# Single translator factory for the project's "cyborg" translation domain.
_translators = i18n.TranslatorFactory(domain='cyborg')

# The primary translation function using the well-known name "_"
_ = _translators.primary
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/paths.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/paths.py
new file mode 100644
index 0000000..38d2411
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/paths.py
@@ -0,0 +1,48 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+from cyborg.conf import CONF
+
+
def _join_template(root, args):
    """Join *args* under an uninterpolated $variable root."""
    return os.path.join(root, *args)


def basedir_def(*args):
    """Return an uninterpolated path relative to $pybasedir."""
    return _join_template('$pybasedir', args)


def bindir_def(*args):
    """Return an uninterpolated path relative to $bindir."""
    return _join_template('$bindir', args)


def state_path_def(*args):
    """Return an uninterpolated path relative to $state_path."""
    return _join_template('$state_path', args)
+
+
def basedir_rel(*args):
    """Return a concrete path under the configured pybasedir."""
    root = CONF.pybasedir
    return os.path.join(root, *args)


def bindir_rel(*args):
    """Return a concrete path under the configured bindir."""
    root = CONF.bindir
    return os.path.join(root, *args)


def state_path_rel(*args):
    """Return a concrete path under the configured state_path."""
    root = CONF.state_path
    return os.path.join(root, *args)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/policy.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/policy.py
new file mode 100644
index 0000000..846f046
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/policy.py
@@ -0,0 +1,265 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Policy Engine For Cyborg."""
+
+import functools
+import sys
+
+from oslo_concurrency import lockutils
+from oslo_config import cfg
+from oslo_log import log
+from oslo_policy import policy
+from oslo_versionedobjects import base as object_base
+import pecan
+import wsme
+
+from cyborg.common import exception
+
+
+_ENFORCER = None
+CONF = cfg.CONF
+LOG = log.getLogger(__name__)
+
+
default_policies = [
    # Legacy setting, don't remove. Likely to be overridden by operators who
    # forget to update their policy.json configuration file.
    # This gets rolled into the new "is_admin" rule below.
    # comment by bob. There is no RuleDefault class in policy of mitaka release.
    # NOTE(review): the triple-quoted block below is a single plain string
    # element, not executable RuleDefault declarations; Mitaka's oslo.policy
    # lacks RuleDefault, so nothing here is registered as a policy default.
    '''
    policy.RuleDefault('admin_api',
                       'role:admin or role:administrator',
                       description='Legacy rule for cloud admin access'),
    # is_public_api is set in the environment from AuthTokenMiddleware
    policy.RuleDefault('public_api',
                       'is_public_api:True',
                       description='Internal flag for public API routes'),
    # The policy check "@" will always accept an access. The empty list
    # (``[]``) or the empty string (``""``) is equivalent to the "@"
    policy.RuleDefault('allow',
                       '@',
                       description='any access will be passed'),
    # the policy check "!" will always reject an access.
    policy.RuleDefault('deny',
                       '!',
                       description='all access will be forbidden'),
    policy.RuleDefault('is_admin',
                       'rule:admin_api',
                       description='Full read/write API access'),
    policy.RuleDefault('admin_or_owner',
                       'is_admin:True or project_id:%(project_id)s',
                       description='Admin or owner API access'),
    policy.RuleDefault('admin_or_user',
                       'is_admin:True or user_id:%(user_id)s',
                       description='Admin or user API access'),
    policy.RuleDefault('default',
                       'rule:admin_or_owner',
                       description='Default API access rule'),
    '''
]

# NOTE: to follow policy-in-code spec, we define defaults for
# the granular policies in code, rather than in policy.json.
# All of these may be overridden by configuration, but we can
# depend on their existence throughout the code.

accelerator_policies = [
    # comment by bob. There is no RuleDefault class in policy of mitaka release.
    # NOTE(review): as above, the quoted block is one inert string element.
    '''
    policy.RuleDefault('cyborg:accelerator:get',
                       'rule:default',
                       description='Retrieve accelerator records'),
    policy.RuleDefault('cyborg:accelerator:create',
                       'rule:allow',
                       description='Create accelerator records'),
    policy.RuleDefault('cyborg:accelerator:delete',
                       'rule:default',
                       description='Delete accelerator records'),
    policy.RuleDefault('cyborg:accelerator:update',
                       'rule:default',
                       description='Update accelerator records'),
    '''

]

deployable_policies = [
    # NOTE(review): as above, the quoted block is one inert string element.
    '''
    policy.RuleDefault('cyborg:deployable:get_one',
                       'rule:allow',
                       description='Show deployable detail'),
    policy.RuleDefault('cyborg:deployable:get_all',
                       'rule:allow',
                       description='Retrieve all deployable records'),
    policy.RuleDefault('cyborg:deployable:create',
                       'rule:admin_api',
                       description='Create deployable records'),
    policy.RuleDefault('cyborg:deployable:delete',
                       'rule:admin_api',
                       description='Delete deployable records'),
    policy.RuleDefault('cyborg:deployable:update',
                       'rule:admin_api',
                       description='Update deployable records'),
    '''
]
+
+
def list_policies():
    """Return the combined default + per-resource policy definitions.

    NOTE(review): with the Mitaka workaround in the lists above, each
    element is a placeholder string rather than a RuleDefault object.
    """
    return default_policies + accelerator_policies + deployable_policies
+
+
@lockutils.synchronized('policy_enforcer', 'cyborg-')
def init_enforcer(policy_file=None, rules=None,
                  default_rule=None, use_conf=True):
    """Synchronously initializes the policy enforcer

    :param policy_file: Custom policy file to use, if none is specified,
                        `CONF.oslo_policy.policy_file` will be used.
    :param rules: Default dictionary / Rules to use. It will be
                  considered just in the first instantiation.
    :param default_rule: Default rule to use,
                         CONF.oslo_policy.policy_default_rule will
                         be used if none is specified.
    :param use_conf: Whether to load rules from config file.

    """
    global _ENFORCER

    # Already created by an earlier caller; the lock makes this
    # check-then-create race-free.
    if _ENFORCER:
        return

    # NOTE: Register defaults for policy-in-code here so that they are
    # loaded exactly once - when this module-global is initialized.
    # Defining these in the relevant API modules won't work
    # because API classes lack singletons and don't use globals.
    _ENFORCER = policy.Enforcer(CONF, policy_file=policy_file,
                                rules=rules,
                                default_rule=default_rule,
                                use_conf=use_conf)

    # no register_defaults method. by bob
    # NOTE(review): Enforcer.register_defaults is not available in this
    # oslo.policy release, so the policy-in-code defaults are never
    # registered here.
    #_ENFORCER.register_defaults(list_policies())
+
+
def get_enforcer():
    """Return the process-wide policy enforcer, creating it on first use."""
    global _ENFORCER

    if not _ENFORCER:
        init_enforcer()
    return _ENFORCER
+
+
+# NOTE: We can't call these methods from within decorators because the
+# 'target' and 'creds' parameter must be fetched from the call time
+# context-local pecan.request magic variable, but decorators are compiled
+# at module-load time.
+
+
def authorize(rule, target, creds, do_raise=False, *args, **kwargs):
    """A shortcut for policy.Enforcer.authorize()

    Checks authorization of a rule against the target and credentials, and
    raises an exception if the rule is not defined.
    """
    enforcer = get_enforcer()
    try:
        # no authorize. comment by bob
        # NOTE(review): enforce() is used because Enforcer.authorize() is
        # unavailable in this oslo.policy release.
        #return enforcer.authorize(rule, target, creds, do_raise=do_raise,
        return enforcer.enforce(rule, target, creds, do_raise=do_raise,
                                *args, **kwargs)
    except policy.PolicyNotAuthorized:
        # Translate the oslo error into Cyborg's HTTP 403 exception.
        raise exception.HTTPForbidden(resource=rule)
+
+
# This decorator MUST appear first (the outermost decorator)
# on an API method for it to work correctly
def authorize_wsgi(api_name, act=None, need_target=True):
    """This is a decorator to simplify wsgi action policy rule check.

    :param api_name: The collection name to be evaluated.
    :param act: The function name of the wsgi action.
    :param need_target: Whether a target is needed for authorization.
        For example, when creating a resource no target exists yet.

    example:
        from cyborg.common import policy
        class AcceleratorController(rest.RestController):
            ....
            @policy.authorize_wsgi("cyborg:accelerator", "create", False)
            @wsme_pecan.wsexpose(Accelerator, body=types.jsontype,
                                 status_code=http_client.CREATED)
            def post(self, values):
                ...
    """
    # NOTE: only local names and comments were fixed here ("wraper" ->
    # "wrapper", typos); the authorization logic is unchanged.
    def wrapper(fn):
        action = '%s:%s' % (api_name, act or fn.__name__)

        # In this authorize method, we return dict data when authorization
        # fails or an exception comes out. Maybe we can consider using
        # wsme.api.Response in the future.
        def return_error(resp_status):
            exception_info = sys.exc_info()
            orig_exception = exception_info[1]
            orig_code = getattr(orig_exception, 'code', None)
            pecan.response.status = orig_code or resp_status
            data = wsme.api.format_exception(
                exception_info,
                pecan.conf.get('wsme', {}).get('debug', False)
            )
            # Break the traceback reference cycle.
            del exception_info
            return data

        @functools.wraps(fn)
        def handle(self, *args, **kwargs):
            context = pecan.request.context
            credentials = context.to_policy_values()
            credentials['is_admin'] = context.is_admin
            target = {}
            # Maybe we can pass "_get_resource" to authorize_wsgi.
            if need_target and hasattr(self, "_get_resource"):
                try:
                    resource = getattr(self, "_get_resource")(*args, **kwargs)
                    # Only objects are supported; any other type keeps the
                    # target empty, so the authorize call below fails and
                    # throws an exception.
                    if isinstance(resource,
                                  object_base.VersionedObjectDictCompat):
                        target = {'project_id': resource.project_id,
                                  'user_id': resource.user_id}
                except Exception:
                    return return_error(500)
            elif need_target:
                # If the developer did not set _get_resource, keep the
                # target empty so the authorize call below fails and throws
                # an exception.
                target = {}
            else:
                # For create methods the resource does not exist yet, so
                # check the credentials against themselves.
                target = {'project_id': context.tenant,
                          'user_id': context.user}

            try:
                authorize(action, target, credentials, do_raise=True)
            except Exception:
                return return_error(403)

            return fn(self, *args, **kwargs)

        return handle

    return wrapper
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/rpc.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/rpc.py
new file mode 100644
index 0000000..02ff5ec
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/rpc.py
@@ -0,0 +1,123 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+from oslo_context import context as cyborg_context
+import oslo_messaging as messaging
+from oslo_messaging.rpc import dispatcher
+
+from cyborg.common import exception
+
+
CONF = cfg.CONF
# Module-level messaging state: populated by init(), torn down by cleanup().
TRANSPORT = None
NOTIFICATION_TRANSPORT = None
NOTIFIER = None

# Exception modules whose exceptions may be deserialized from remote
# services; extended at runtime via add_extra_exmods().
ALLOWED_EXMODS = [
    exception.__name__,
]
EXTRA_EXMODS = []
+
+
def init(conf):
    """Create the shared RPC/notification transports and the notifier."""
    global TRANSPORT, NOTIFICATION_TRANSPORT, NOTIFIER
    exmods = get_allowed_exmods()
    # NOTE(review): get_rpc_transport() is unavailable in this
    # oslo.messaging release, hence the older get_transport() call.
    #TRANSPORT = messaging.get_rpc_transport(conf,
    TRANSPORT = messaging.get_transport(conf,
                                        allowed_remote_exmods=exmods)
    NOTIFICATION_TRANSPORT = messaging.get_notification_transport(
        conf,
        allowed_remote_exmods=exmods)
    serializer = RequestContextSerializer(messaging.JsonPayloadSerializer())
    NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT,
                                  serializer=serializer,
                                  topics=['notifications'])
+
+
def cleanup():
    """Tear down the module-level transports created by init()."""
    global TRANSPORT, NOTIFICATION_TRANSPORT, NOTIFIER
    # init() must have been called first.
    assert TRANSPORT is not None
    assert NOTIFICATION_TRANSPORT is not None
    assert NOTIFIER is not None
    TRANSPORT.cleanup()
    NOTIFICATION_TRANSPORT.cleanup()
    TRANSPORT = NOTIFICATION_TRANSPORT = NOTIFIER = None
+
+
def set_defaults(control_exchange):
    """Set the default control exchange for oslo.messaging."""
    messaging.set_transport_defaults(control_exchange)


def add_extra_exmods(*args):
    """Allow additional exception modules to cross the RPC boundary."""
    EXTRA_EXMODS.extend(args)


def clear_extra_exmods():
    """Remove all dynamically added exception modules."""
    del EXTRA_EXMODS[:]


def get_allowed_exmods():
    """Return the static plus dynamically added exception modules."""
    return ALLOWED_EXMODS + EXTRA_EXMODS
+
+
class RequestContextSerializer(messaging.Serializer):
    """Wrap another serializer and add RequestContext (de)serialization.

    Entity handling is delegated to the wrapped serializer when one was
    provided; otherwise entities pass through untouched.
    """

    def __init__(self, base):
        self._base = base

    def serialize_entity(self, context, entity):
        if self._base:
            return self._base.serialize_entity(context, entity)
        return entity

    def deserialize_entity(self, context, entity):
        if self._base:
            return self._base.deserialize_entity(context, entity)
        return entity

    def serialize_context(self, context):
        # Contexts travel over the wire as plain dicts.
        return context.to_dict()

    def deserialize_context(self, context):
        return cyborg_context.RequestContext.from_dict(context)
+
+
def get_client(target, version_cap=None, serializer=None):
    """Build an RPC client for *target* over the shared transport."""
    assert TRANSPORT is not None
    wrapped = RequestContextSerializer(serializer)
    return messaging.RPCClient(TRANSPORT, target,
                               version_cap=version_cap,
                               serializer=wrapped)
+
+
def get_server(target, endpoints, serializer=None):
    """Build an eventlet-based RPC server on the shared transport."""
    assert TRANSPORT is not None
    # comment by bob
    # NOTE(review): access policies are unavailable in this oslo.messaging
    # release, so the argument stays commented out.
    #access_policy = dispatcher.DefaultRPCAccessPolicy
    serializer = RequestContextSerializer(serializer)
    return messaging.get_rpc_server(TRANSPORT,
                                    target,
                                    endpoints,
                                    executor='eventlet',
                                    serializer=serializer)
    #access_policy=access_policy)
+
+
def get_notifier(service=None, host=None, publisher_id=None):
    """Return a notifier prepared with a publisher id.

    When *publisher_id* is omitted it is derived as "<service>.<host>",
    with the host defaulting to CONF.host.
    """
    assert NOTIFIER is not None
    if not publisher_id:
        target_host = host or CONF.host
        publisher_id = "%s.%s" % (service, target_host)
    return NOTIFIER.prepare(publisher_id=publisher_id)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/service.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/service.py
new file mode 100755
index 0000000..2eaeac9
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/service.py
@@ -0,0 +1,145 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_concurrency import processutils
+from oslo_context import context
+from oslo_log import log
+import oslo_messaging as messaging
+from oslo_service import service
+from oslo_service import wsgi
+from oslo_utils import importutils
+
+from cyborg.api import app
+from cyborg.common import config
+from cyborg.common import exception
+from cyborg.common.i18n import _
+from cyborg.common import rpc
+from cyborg.conf import CONF
+from cyborg import objects
+from cyborg.objects import base as objects_base
+
+
+LOG = log.getLogger(__name__)
+
+
class RPCService(service.Service):
    """oslo.service wrapper running a manager class as an RPC server."""

    def __init__(self, manager_module, manager_class, topic, host=None):
        super(RPCService, self).__init__()
        self.topic = topic
        self.host = host or CONF.host
        # Import the manager lazily by dotted path so cmd entry points stay
        # free of heavy imports.
        manager_module = importutils.try_import(manager_module)
        manager_class = getattr(manager_module, manager_class)
        self.manager = manager_class(self.topic, self.host)
        self.rpcserver = None

    def start(self):
        """Start the RPC server and the manager's periodic tasks."""
        super(RPCService, self).start()

        target = messaging.Target(topic=self.topic, server=self.host)
        endpoints = [self.manager]
        serializer = objects_base.CyborgObjectSerializer()
        self.rpcserver = rpc.get_server(target, endpoints, serializer)
        self.rpcserver.start()

        # Periodic tasks run in this service's thread group under an admin
        # context.
        admin_context = context.get_admin_context()
        self.tg.add_dynamic_timer(
            self.manager.periodic_tasks,
            periodic_interval_max=CONF.periodic_interval,
            context=admin_context)

        LOG.info('Created RPC server for service %(service)s on host '
                 '%(host)s.',
                 {'service': self.topic, 'host': self.host})

    def stop(self, graceful=True):
        """Stop the RPC server, then the base service."""
        try:
            self.rpcserver.stop()
            self.rpcserver.wait()
        except Exception as e:
            # Best effort: the server may never have been started.
            LOG.exception('Service error occurred when stopping the '
                          'RPC server. Error: %s', e)

        super(RPCService, self).stop(graceful=graceful)
        LOG.info('Stopped RPC server for service %(service)s on host '
                 '%(host)s.',
                 {'service': self.topic, 'host': self.host})
+
+
def prepare_service(argv=None):
    """Configure logging, parse config/CLI options, register objects."""
    log.register_options(CONF)
    log.set_defaults(default_log_levels=CONF.default_log_levels)

    config.parse_args(argv if argv else [])

    log.setup(CONF, 'cyborg')
    objects.register_all()
+
+
def process_launcher():
    """Return an oslo.service ProcessLauncher bound to CONF."""
    return service.ProcessLauncher(CONF)
+
+
class WSGIService(service.ServiceBase):
    """Provides ability to launch cyborg API from wsgi app."""

    def __init__(self, name, use_ssl=False):
        """Initialize, but do not start the WSGI server.

        :param name: The name of the WSGI server given to the loader.
        :param use_ssl: Wraps the socket in an SSL context if True.
        :returns: None
        """
        self.name = name
        self.app = app.VersionSelectorApplication()
        # Fall back to one worker per CPU when api_workers is not set.
        self.workers = (CONF.api.api_workers or
                        processutils.get_worker_count())
        if self.workers and self.workers < 1:
            raise exception.ConfigInvalid(
                _("api_workers value of %d is invalid, "
                  "must be greater than 0.") % self.workers)

        self.server = wsgi.Server(CONF, self.name, self.app,
                                  host=CONF.api.host_ip,
                                  port=CONF.api.port,
                                  use_ssl=use_ssl)

    def start(self):
        """Start serving this service using loaded configuration.

        :returns: None
        """
        self.server.start()

    def stop(self):
        """Stop serving this API.

        :returns: None
        """
        self.server.stop()

    def wait(self):
        """Wait for the service to stop serving this API.

        :returns: None
        """
        self.server.wait()

    def reset(self):
        """Reset server greenpool size to default.

        :returns: None
        """
        self.server.reset()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/utils.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/utils.py
new file mode 100644
index 0000000..0b97327
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/common/utils.py
@@ -0,0 +1,41 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Utilities and helper functions."""
+
+from oslo_log import log
+from oslo_concurrency import lockutils
+import six
+
+
+LOG = log.getLogger(__name__)
+
+synchronized = lockutils.synchronized_with_prefix('cyborg-')
+
+def safe_rstrip(value, chars=None):
+ """Removes trailing characters from a string if that does not make it empty
+
+ :param value: A string value that will be stripped.
+ :param chars: Characters to remove.
+ :return: Stripped value.
+
+ """
+ if not isinstance(value, six.string_types):
+ LOG.warning("Failed to remove trailing character. Returning "
+ "original object. Supplied object is not a string: "
+ "%s,", value)
+ return value
+
+ return value.rstrip(chars) or value
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/handlers.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/handlers.py
new file mode 100644
index 0000000..27da4e7
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/handlers.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+class NotificationEndpoint(object):
+    # filter_rule = messaging.NotificationFilter(publisher_id='^cyborg.*')
+
+    # We have an update from an agent and we need to add it to our in memory
+    # cache of accelerator objects and schedule a flush to the database
+    def update(self, ctxt, publisher_id, event_type, payload, metadata):
+        print("Got update")  # TODO(review): replace print with oslo_log logging
+        return True
+
+    # We have an info message from an agent, anything that wouldn't
+    # go into the db but needs to be communicated goes here
+    def info(self, ctxt, publisher_id, event_type, payload, metadata):
+        print("Got info")  # TODO(review): replace print with oslo_log logging
+        return True
+
+    # We have a warning from an agent, we may take some action
+    def warn(self, ctxt, publisher_id, event_type, payload, metadata):
+        print("Got warn")  # TODO(review): replace print with oslo_log logging
+        return True
+
+    # We have an error from an agent, we must take some action
+    def error(self, ctxt, publisher_id, event_type, payload, metadata):
+        print("Got error")  # TODO(review): replace print with oslo_log logging
+        return True
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/manager.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/manager.py
new file mode 100644
index 0000000..d4199b1
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/manager.py
@@ -0,0 +1,180 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import oslo_messaging as messaging
+
+from cyborg import objects
+from cyborg.conf import CONF
+
+
+from oslo_log import log as logging
+
+
+LOG = logging.getLogger(__name__)
+
+class ConductorManager(object):
+ """Cyborg Conductor manager main class."""
+
+ RPC_API_VERSION = '1.0'
+ target = messaging.Target(version=RPC_API_VERSION)
+
+ def __init__(self, topic, host=None):
+ super(ConductorManager, self).__init__()
+ self.topic = topic
+ self.host = host or CONF.host
+
+ def periodic_tasks(self, context, raise_on_error=False):
+ pass
+
+ def accelerator_create(self, context, obj_acc):
+ """Create a new accelerator.
+
+ :param context: request context.
+ :param obj_acc: a changed (but not saved) accelerator object.
+ :returns: created accelerator object.
+ """
+ base_options={
+ 'project_id' : context.tenant,
+ 'user_id' : context.user
+ }
+ obj_acc.update(base_options)
+ obj_acc.create(context)
+ return obj_acc
+
+ def accelerator_update(self, context, acc_obj):
+ """Update an accelerator.
+ :param context: request context.
+ :param acc_obj: an accelerator object to update.
+ :return: updated accelerator objects."""
+
+ acc_obj.save(context)
+ return acc_obj
+
+    def accelerator_delete(self, context, acc_obj):
+        """Delete an accelerator.
+
+        :param context: request context.
+        :param acc_obj: an accelerator object to delete."""
+
+        acc_obj.destory(context)  # NOTE(review): 'destory' (sic) -- presumably matches the object API's misspelled method (the DB layer also uses 'accelerator_destory'); confirm before renaming
+
+
+ def port_create(self, context, port_obj):
+ """Create a new port.
+
+ :param context: request context.
+ :param port_obj: a changed (but not saved) port object.
+ :returns: created port object.
+ """
+ port_obj.create(context)
+ return port_obj
+
+ def port_bulk_create(self, context, port_list):
+ """Create a new port.
+
+ :param context: request context.
+ :param port_list: port list need to be create and save.
+ :returns: request result.
+ """
+ try:
+ for port in list(port_list):
+ port_obj = objects.Port(context, **port)
+ port = self.check_port_exist(context, port_obj)
+ if not port:
+ port_obj.create(context)
+
+ LOG.info('Update port resource %s ' % (port_list))
+ return True
+ except Exception as e:
+ LOG.error("Failed to port bulk create with error: %s" % (e))
+ LOG.error("Failed to port bulk create: %s" % (port_list))
+
+
+ def port_update(self, context, port_obj):
+ """Update a port.
+ :param context: request context.
+ :param port_obj: a port object to update.
+ :return: updated port objects."""
+
+ port_obj.save(context)
+ return port_obj
+
+    def port_delete(self, context, port_obj):
+        """Delete a port.
+
+        :param context: request context.
+        :param port_obj: a port object to delete."""
+
+        port_obj.destory(context)  # NOTE(review): 'destory' (sic) -- presumably matches the object API's misspelled method (the DB layer also uses 'port_destory'); confirm before renaming
+
+ def check_port_exist(self, context, port_obj):
+ """Delete a port.
+
+ :param port_obj: a port object to delete.
+ :returns: True/False exist or not exist.
+ """
+ return objects.Port.get(context=context, phy_port_name=port_obj.phy_port_name, \
+ pci_slot=port_obj.pci_slot, computer_node=port_obj.computer_node)
+
+ # deployable object
+ def deployable_create(self, context, obj_dep):
+ """Create a new deployable.
+ :param context: request context.
+ :param obj_dep: a changed (but not saved) obj_dep object.
+ :returns: created obj_dep object.
+ """
+ obj_dep.create(context)
+ return obj_dep
+
+ def deployable_update(self, context, obj_dep):
+ """Update a deployable.
+ :param context: request context.
+ :param obj_dep: a deployable object to update.
+ :returns: updated deployable object.
+ """
+ obj_dep.save(context)
+ return obj_dep
+
+ def deployable_delete(self, context, obj_dep):
+ """Delete a deployable.
+ :param context: request context.
+ :param obj_dep: a deployable object to delete.
+ """
+ obj_dep.destroy(context)
+
+ def deployable_get(self, context, uuid):
+ """Retrieve a deployable.
+ :param context: request context.
+ :param uuid: UUID of a deployable.
+ :returns: requested deployable object.
+ """
+ return objects.Deployable.get(context, uuid)
+
+ def deployable_get_by_host(self, context, host):
+ """Retrieve a deployable.
+ :param context: request context.
+ :param host: host on which the deployable is located.
+ :returns: requested deployable object.
+ """
+ return objects.Deployable.get_by_host(context, host)
+
+ def deployable_list(self, context):
+ """Retrieve a list of deployables.
+ :param context: request context.
+ :returns: a list of deployable objects.
+ """
+ return objects.Deployable.list(context)
+
+
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/rpcapi.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/rpcapi.py
new file mode 100644
index 0000000..451846d
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conductor/rpcapi.py
@@ -0,0 +1,192 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Client side of the conductor RPC API."""
+
+from oslo_config import cfg
+import oslo_messaging as messaging
+
+from cyborg.common import constants
+from cyborg.common import rpc
+from cyborg.objects import base as objects_base
+
+
+CONF = cfg.CONF
+
+
+class ConductorAPI(object):
+ """Client side of the conductor RPC API.
+
+ API version history:
+
+ | 1.0 - Initial version.
+
+ """
+
+ RPC_API_VERSION = '1.0'
+
+ def __init__(self, topic=None):
+ super(ConductorAPI, self).__init__()
+ self.topic = topic or constants.CONDUCTOR_TOPIC
+ target = messaging.Target(topic=self.topic,
+ version='1.0')
+ serializer = objects_base.CyborgObjectSerializer()
+ self.client = rpc.get_client(target,
+ version_cap=self.RPC_API_VERSION,
+ serializer=serializer)
+
+ def accelerator_create(self, context, obj_acc):
+ """Signal to conductor service to create an accelerator.
+
+ :param context: request context.
+ :param obj_acc: a created (but not saved) accelerator object.
+ :returns: created accelerator object.
+ """
+ cctxt = self.client.prepare(topic=self.topic, server=CONF.host)
+ return cctxt.call(context, 'accelerator_create', obj_acc=obj_acc)
+
+ def accelerator_update(self, context, acc_obj):
+ """Signal to conductor service to update an accelerator.
+
+ :param context: request context.
+ :param acc_obj: an accelerator object to update.
+ :returns: updated accelerator object.
+ """
+ cctxt = self.client.prepare(topic=self.topic, server=CONF.host)
+ return cctxt.call(context, 'accelerator_update', acc_obj=acc_obj)
+
+ def accelerator_delete(self, context, acc_obj):
+ """Signal to conductor service to delete an accelerator.
+
+ :param context: request context.
+ :param acc_obj: an accelerator to delete.
+ """
+ cctxt = self.client.prepare(topic=self.topic, server=CONF.host)
+ cctxt.call(context, 'accelerator_delete', acc_obj=acc_obj)
+
+ def accelerator_list_one(self, context, obj_acc):
+ """Signal to conductor service to list an accelerator.
+
+ :param context: request context.
+ :param obj_acc: an accelerator object to list.
+ :returns: accelerator object.
+ """
+
+ cctxt = self.client.prepare(topic=self.topic, server=CONF.host)
+ cctxt.call(context, 'get_one', obj_acc=obj_acc)
+
+ def accelerator_list_all(self, context, obj_acc):
+ """Signal to conductor service to list all accelerators.
+
+ :param context: request context.
+ :param obj_acc: accelerator objects to list.
+ :returns: accelerator objects.
+
+ """
+
+ cctxt = self.client.prepare(topic=self.topic)
+ return cctxt.call(context, 'get_all', obj_acc=obj_acc)
+
+ def port_create(self, context, port_obj):
+ """Signal to conductor service to create a port.
+
+ :param context: request context.
+ :param port_obj: a created (but not saved) port object.
+ :returns: created port object.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ return cctxt.call(context, 'port_create', port_obj=port_obj)
+
+ def port_bulk_create(self, context, port_list):
+ """Signal to conductor service to create a port.
+
+ :param context: request context.
+ :param port_list: port list need to be create and save.
+ :returns: request result.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ return cctxt.call(context, 'port_bulk_create', port_list=port_list)
+
+ def port_update(self, context, port_obj):
+ """Signal to conductor service to update a port.
+
+ :param context: request context.
+ :param port_obj: a port object to update.
+ :returns: updated port object.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ return cctxt.call(context, 'port_update', port_obj=port_obj)
+
+ def port_delete(self, context, port_obj):
+ """Signal to conductor service to delete a port.
+
+ :param context: request context.
+ :param port_obj: a port to delete.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ cctxt.call(context, 'port_delete', port_obj=port_obj)
+
+ #deployable object
+ def deployable_create(self, context, obj_dep):
+ """Signal to conductor service to create a deployable.
+ :param context: request context.
+ :param obj_dep: a created (but not saved) deployable object.
+ :returns: created deployable object.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ return cctxt.call(context, 'deployable_create', obj_dep=obj_dep)
+
+ def deployable_update(self, context, obj_dep):
+ """Signal to conductor service to update a deployable.
+ :param context: request context.
+ :param obj_dep: a deployable object to update.
+ :returns: updated deployable object.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ return cctxt.call(context, 'deployable_update', obj_dep=obj_dep)
+
+ def deployable_delete(self, context, obj_dep):
+ """Signal to conductor service to delete a deployable.
+ :param context: request context.
+ :param obj_dep: a deployable object to delete.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ cctxt.call(context, 'deployable_delete', obj_dep=obj_dep)
+
+ def deployable_get(self, context, uuid):
+ """Signal to conductor service to get a deployable.
+ :param context: request context.
+ :param uuid: UUID of a deployable.
+ :returns: requested deployable object.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ return cctxt.call(context, 'deployable_get', uuid=uuid)
+
+ def deployable_get_by_host(self, context, host):
+ """Signal to conductor service to get a deployable by host.
+ :param context: request context.
+ :param host: host on which the deployable is located.
+ :returns: requested deployable object.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ return cctxt.call(context, 'deployable_get_by_host', host=host)
+
+ def deployable_list(self, context):
+ """Signal to conductor service to get a list of deployables.
+ :param context: request context.
+ :returns: a list of deployable objects.
+ """
+ cctxt = self.client.prepare(topic=self.topic)
+ return cctxt.call(context, 'deployable_list')
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/__init__.py
new file mode 100644
index 0000000..04f8785
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/__init__.py
@@ -0,0 +1,29 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+
+from cyborg.conf import api
+from cyborg.conf import database
+from cyborg.conf import default
+from cyborg.conf import placement
+
+
+CONF = cfg.CONF
+
+api.register_opts(CONF)
+database.register_opts(CONF)
+default.register_opts(CONF)
+placement.register_opts(CONF)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/api.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/api.py
new file mode 100644
index 0000000..3f1a533
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/api.py
@@ -0,0 +1,58 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+
+from cyborg.common.i18n import _
+
+
+opts = [
+ # oslo_config has no HostAddressOpt class. by bob
+ #cfg.HostAddressOpt('host_ip',
+ cfg.StrOpt('host_ip',
+ default='0.0.0.0',
+ help=_('The IP address on which cyborg-api listens.')),
+ cfg.PortOpt('port',
+ default=6666,
+ help=_('The TCP port on which cyborg-api listens.')),
+ cfg.IntOpt('api_workers',
+ help=_('Number of workers for OpenStack Cyborg API service. '
+ 'The default is equal to the number of CPUs available '
+ 'if that can be determined, else a default worker '
+ 'count of 1 is returned.')),
+ cfg.BoolOpt('enable_ssl_api',
+ default=False,
+ help=_("Enable the integrated stand-alone API to service "
+ "requests via HTTPS instead of HTTP. If there is a "
+ "front-end service performing HTTPS offloading from "
+ "the service, this option should be False; note, you "
+ "will want to change public API endpoint to represent "
+ "SSL termination URL with 'public_endpoint' option.")),
+ cfg.StrOpt('public_endpoint',
+ help=_("Public URL to use when building the links to the API "
+ "resources (for example, \"https://cyborg.rocks:6666\")."
+ " If None the links will be built using the request's "
+ "host URL. If the API is operating behind a proxy, you "
+ "will want to change this to represent the proxy's URL. "
+ "Defaults to None.")),
+]
+
+opt_group = cfg.OptGroup(name='api',
+ title='Options for the cyborg-api service')
+
+
+def register_opts(conf):
+ conf.register_group(opt_group)
+ conf.register_opts(opts, group=opt_group)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/database.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/database.py
new file mode 100644
index 0000000..be65355
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/database.py
@@ -0,0 +1,32 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+
+from cyborg.common.i18n import _
+
+
+opts = [
+ cfg.StrOpt('mysql_engine',
+ default='InnoDB',
+ help=_('MySQL engine to use.'))
+]
+
+opt_group = cfg.OptGroup(name='database',
+ title='Options for the database service')
+
+
+def register_opts(conf):
+ conf.register_opts(opts, group=opt_group)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/default.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/default.py
new file mode 100644
index 0000000..bd5e0fd
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/default.py
@@ -0,0 +1,69 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import socket
+
+from oslo_config import cfg
+
+from cyborg.common.i18n import _
+
+
+exc_log_opts = [
+ cfg.BoolOpt('fatal_exception_format_errors',
+ default=False,
+ help=_('Used if there is a formatting error when generating '
+ 'an exception message (a programming error). If True, '
+ 'raise an exception; if False, use the unformatted '
+ 'message.')),
+]
+
+service_opts = [
+ #cfg.HostAddressOpt('host',
+ cfg.StrOpt('host',
+ default=socket.getfqdn(),
+ sample_default='localhost',
+ help=_('Name of this node. This can be an opaque '
+ 'identifier. It is not necessarily a hostname, '
+ 'FQDN, or IP address. However, the node name '
+ 'must be valid within an AMQP key, and if using '
+ 'ZeroMQ, a valid hostname, FQDN, or IP address.')
+ ),
+ cfg.IntOpt('periodic_interval',
+ default=60,
+ help=_('Default interval (in seconds) for running periodic '
+ 'tasks.')),
+]
+
+path_opts = [
+ cfg.StrOpt('pybasedir',
+ default=os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '../')),
+ sample_default='/usr/lib/python/site-packages/cyborg/cyborg',
+ help=_('Directory where the cyborg python module is '
+ 'installed.')),
+ cfg.StrOpt('bindir',
+ default='$pybasedir/bin',
+ help=_('Directory where cyborg binaries are installed.')),
+ cfg.StrOpt('state_path',
+ default='$pybasedir',
+ help=_("Top-level directory for maintaining cyborg's state.")),
+]
+
+
+def register_opts(conf):
+ conf.register_opts(exc_log_opts)
+ conf.register_opts(service_opts)
+ conf.register_opts(path_opts)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/placement.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/placement.py
new file mode 100644
index 0000000..0c2506b
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/conf/placement.py
@@ -0,0 +1,66 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+
+from cyborg.common.i18n import _
+
+opts = [
+ cfg.StrOpt('region_name',
+ help=_('Name of placement region to use. Useful if keystone '
+ 'manages more than one region.')),
+ cfg.StrOpt('endpoint_type',
+ default='public',
+ choices=['public', 'admin', 'internal'],
+ help=_('Type of the placement endpoint to use. This endpoint '
+ 'will be looked up in the keystone catalog and should '
+ 'be one of public, internal or admin.')),
+ cfg.BoolOpt('insecure',
+ default=False,
+ help="""
+ If true, the vCenter server certificate is not verified.
+ If false, then the default CA truststore is used for
+ verification. Related options:
+ * ca_file: This option is ignored if "ca_file" is set.
+ """),
+ cfg.StrOpt('cafile',
+ default=None,
+ help="""
+ Specifies the CA bundle file to be used in verifying the
+ vCenter server certificate.
+ """),
+ cfg.StrOpt('certfile',
+ default=None,
+ help="""
+ Specifies the certificate file to be used in verifying
+ the vCenter server certificate.
+ """),
+ cfg.StrOpt('keyfile',
+ default=None,
+ help="""
+ Specifies the key file to be used in verifying the vCenter
+ server certificate.
+ """),
+ cfg.IntOpt('timeout',
+ default=None,
+ help=_('Timeout for inactive connections (in seconds)')),
+]
+
+opt_group = cfg.OptGroup(name='placement',
+ title='Options for the nova placement sync service')
+
+
+def register_opts(conf):
+    conf.register_opts(opts, group=opt_group)
\ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/api.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/api.py
new file mode 100644
index 0000000..3b08955
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/api.py
@@ -0,0 +1,134 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Base classes for storage engines."""
+
+import abc
+
+from oslo_config import cfg
+from oslo_db import api as db_api
+import six
+
+
+_BACKEND_MAPPING = {'sqlalchemy': 'cyborg.db.sqlalchemy.api'}
+IMPL = db_api.DBAPI.from_config(cfg.CONF,
+ backend_mapping=_BACKEND_MAPPING,
+ lazy=True)
+
+
+def get_instance():
+ """Return a DB API instance."""
+ return IMPL
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Connection(object):
+ """Base class for storage system connections."""
+
+ @abc.abstractmethod
+ def __init__(self):
+ """Constructor."""
+
+ # accelerator
+ @abc.abstractmethod
+ def accelerator_create(self, context, values):
+ """Create a new accelerator."""
+
+ @abc.abstractmethod
+ def accelerator_get(self, context, uuid):
+ """Get requested accelerator."""
+
+ @abc.abstractmethod
+ def accelerator_list(self, context, limit, marker, sort_key, sort_dir, project_only):
+ """Get requested list accelerators."""
+
+ @abc.abstractmethod
+ def accelerator_update(self, context, uuid, values):
+ """Update an accelerator."""
+
+ @abc.abstractmethod
+ def accelerator_destory(self, context, uuid):
+ """Delete an accelerator."""
+
+ #ports
+ @abc.abstractmethod
+ def port_create(self, context, values):
+ """Create a new port."""
+
+ @abc.abstractmethod
+ def port_get(self, context, uuid):
+ """Get requested port."""
+
+ @abc.abstractmethod
+ def port_list(self, context, limit, marker, sort_key, sort_dir):
+ """Get requested list ports."""
+
+ @abc.abstractmethod
+ def port_update(self, context, uuid, values):
+ """Update a port."""
+
+ @abc.abstractmethod
+ def port_destory(self, context, uuid):
+ """Delete a port."""
+
+ #deployable
+ @abc.abstractmethod
+ def deployable_create(self, context, values):
+ """Create a new deployable."""
+
+ @abc.abstractmethod
+ def deployable_get(self, context, uuid):
+ """Get requested deployable."""
+
+ @abc.abstractmethod
+ def deployable_get_by_host(self, context, host):
+ """Get requested deployable by host."""
+
+ @abc.abstractmethod
+ def deployable_list(self, context):
+ """Get requested list of deployables."""
+
+ @abc.abstractmethod
+ def deployable_update(self, context, uuid, values):
+ """Update a deployable."""
+
+ @abc.abstractmethod
+ def deployable_delete(self, context, uuid):
+ """Delete a deployable."""
+
+ @abc.abstractmethod
+ def deployable_get_by_filters(self, context,
+ filters, sort_key='created_at',
+ sort_dir='desc', limit=None,
+ marker=None, columns_to_join=None):
+ """Get requested deployable by filter."""
+
+ #attribute table
+ @abc.abstractmethod
+ def attribute_create(self, context, key, value):
+ """Create a new attribute."""
+
+ @abc.abstractmethod
+ def attribute_get(self, context, uuid):
+ """Get requested attribute."""
+
+ @abc.abstractmethod
+ def attribute_update(self, context, uuid, key, value):
+ """Update an attribute's key value pair."""
+
+ @abc.abstractmethod
+ def attribute_delete(self, context, uuid):
+ """Delete an attribute."""
+
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/migration.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/migration.py
new file mode 100644
index 0000000..5c7f580
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/migration.py
@@ -0,0 +1,52 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Database setup and migration commands."""
+
+from oslo_config import cfg
+from stevedore import driver
+
+
+_IMPL = None
+
+
+def get_backend():
+ global _IMPL
+ if not _IMPL:
+ cfg.CONF.import_opt('backend', 'oslo_db.options', group='database')
+ _IMPL = driver.DriverManager("cyborg.database.migration_backend",
+ cfg.CONF.database.backend).driver
+ return _IMPL
+
+
+def upgrade(version=None):
+ """Migrate the database to `version` or the most recent version."""
+ return get_backend().upgrade(version)
+
+
+def version():
+ return get_backend().version()
+
+
+def stamp(version):
+ return get_backend().stamp(version)
+
+
+def revision(message, autogenerate):
+ return get_backend().revision(message, autogenerate)
+
+
+def create_schema():
+ return get_backend().create_schema()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic.ini b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic.ini
new file mode 100644
index 0000000..a768980
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic.ini
@@ -0,0 +1,54 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = %(here)s/alembic
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# max length of characters to apply to the
+# "slug" field
+#truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+#sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/README b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/README
new file mode 100644
index 0000000..9af08b3
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/README
@@ -0,0 +1,12 @@
+Please see https://alembic.readthedocs.org/en/latest/index.html for general documentation
+
+To create alembic migrations use:
$ cyborg-dbsync revision --message "<description>" --autogenerate
+
+Stamp db with most recent migration version, without actually running migrations
+$ cyborg-dbsync stamp --revision head
+
+Upgrade can be performed by:
+$ cyborg-dbsync - for backward compatibility
+$ cyborg-dbsync upgrade
$ cyborg-dbsync upgrade --revision head
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/env.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/env.py
new file mode 100644
index 0000000..982b99b
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/env.py
@@ -0,0 +1,61 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from logging import config as log_config
+
+from alembic import context
+from oslo_db.sqlalchemy import enginefacade
+
+try:
+ # NOTE(whaom): This is to register the DB2 alembic code which
+ # is an optional runtime dependency.
+ from ibm_db_alembic.ibm_db import IbmDbImpl # noqa
+except ImportError:
+ pass
+
+from cyborg.db.sqlalchemy import models
+
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
log_config.fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support; alembic compares this metadata against the
# live database when generating revisions.
# from myapp import mymodel
target_metadata = models.Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
+
+
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # Reuse oslo.db's configured engine instead of building one from the
    # (commented-out) sqlalchemy.url entry in alembic.ini.
    engine = enginefacade.get_legacy_facade().get_engine()
    with engine.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()


# NOTE: offline ('--sql') mode is not supported by this env.py; every
# alembic command runs against a live connection.
run_migrations_online()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/script.py.mako b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/script.py.mako
new file mode 100644
index 0000000..3b1c960
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/script.py.mako
@@ -0,0 +1,18 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision}
+Create Date: ${create_date}
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/versions/e410080397351_create_port_table.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/versions/e410080397351_create_port_table.py
new file mode 100644
index 0000000..c42e0fa
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/versions/e410080397351_create_port_table.py
@@ -0,0 +1,55 @@
+# Copyright 2018 Lenovo Research Co.,LTD.
+# All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""create ports table migration.
+
+Revision ID: e41080397351
Revises: f50980397351
+Create Date: 2018-01-26 17:34:36.010417
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'e41080397351'
+down_revision = 'f50980397351'
+
+
+from alembic import op
+import sqlalchemy as sa
+
+
def upgrade():
    """Create the 'ports' table (revision e41080397351)."""
    op.create_table(
        'ports',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uuid', sa.String(length=36), nullable=False),
        sa.Column('computer_node', sa.String(length=36), nullable=False),
        sa.Column('phy_port_name', sa.String(length=255), nullable=False),  # physical eth port name
        sa.Column('pci_slot', sa.String(length=255), nullable=False),
        sa.Column('product_id', sa.Text(), nullable=False),
        sa.Column('vendor_id', sa.Text(), nullable=False),
        sa.Column('is_used', sa.Integer(), nullable=False),  # 1 = used, 0 = not used
        sa.Column('accelerator_id', sa.String(length=36), nullable=True),  # accelerator uuid
        sa.Column('bind_instance_id', sa.String(length=36), nullable=True),  # nova instance uuid
        sa.Column('bind_port_id', sa.String(length=36), nullable=True),  # neutron logical port uuid
        sa.Column('device_type', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        # NOTE(review): lowercase mysql_engine/mysql_charset is the usual
        # SQLAlchemy spelling; these uppercase variants render as raw
        # "ENGINE=.."/"DEFAULT_CHARSET=.." table options -- confirm on MySQL.
        mysql_ENGINE='InnoDB',
        mysql_DEFAULT_CHARSET='UTF8'
    )
+
def downgrade():
    """Drop the 'ports' table, reverting upgrade()."""
    op.drop_table('ports')
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/versions/f50980397351_initial_migration.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/versions/f50980397351_initial_migration.py
new file mode 100644
index 0000000..bfbf232
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/alembic/versions/f50980397351_initial_migration.py
@@ -0,0 +1,101 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""initial migration.
+
+Revision ID: f50980397351
+Revises: None
+Create Date: 2017-08-15 08:44:36.010417
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'f50980397351'
+down_revision = None
+
+
+from alembic import op
+import sqlalchemy as sa
+
+
def upgrade():
    """Create the initial schema: accelerators, deployables, attributes.

    No downgrade() is defined for this initial revision.
    """
    op.create_table(
        'accelerators',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uuid', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('project_id', sa.String(length=36), nullable=True),
        sa.Column('user_id', sa.String(length=36), nullable=True),
        sa.Column('device_type', sa.Text(), nullable=False),
        sa.Column('acc_type', sa.Text(), nullable=False),
        sa.Column('acc_capability', sa.Text(), nullable=False),
        sa.Column('vendor_id', sa.Text(), nullable=False),
        sa.Column('product_id', sa.Text(), nullable=False),
        sa.Column('remotable', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('uuid', name='uniq_accelerators0uuid'),
        mysql_ENGINE='InnoDB',
        mysql_DEFAULT_CHARSET='UTF8'
    )

    op.create_table(
        'deployables',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uuid', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        # Self-referential FKs modelling the deployable tree (both point at
        # the unique 'uuid' column of this same table).
        sa.Column('parent_uuid', sa.String(length=36),
                  sa.ForeignKey('deployables.uuid'), nullable=True),
        sa.Column('root_uuid', sa.String(length=36),
                  sa.ForeignKey('deployables.uuid'), nullable=True),
        sa.Column('pcie_address', sa.Text(), nullable=False),
        sa.Column('host', sa.Text(), nullable=False),
        sa.Column('board', sa.Text(), nullable=False),
        sa.Column('vendor', sa.Text(), nullable=False),
        sa.Column('version', sa.Text(), nullable=False),
        sa.Column('type', sa.Text(), nullable=False),
        sa.Column('assignable', sa.Boolean(), nullable=False),
        sa.Column('instance_uuid', sa.String(length=36), nullable=True),
        sa.Column('availability', sa.Text(), nullable=False),
        # sa.Column('accelerator_id', sa.Integer(),
        #           sa.ForeignKey('accelerators.id', ondelete="CASCADE"),
        #           nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('uuid', name='uniq_deployables0uuid'),
        sa.Index('deployables_parent_uuid_idx', 'parent_uuid'),
        sa.Index('deployables_root_uuid_idx', 'root_uuid'),
        mysql_ENGINE='InnoDB',
        mysql_DEFAULT_CHARSET='UTF8'
    )

    op.create_table(
        'attributes',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uuid', sa.String(length=36), nullable=False),
        # Rows are removed automatically when the owning deployable goes.
        sa.Column('deployable_id', sa.Integer(),
                  sa.ForeignKey('deployables.id', ondelete="CASCADE"),
                  nullable=False),
        sa.Column('key', sa.Text(), nullable=False),
        sa.Column('value', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('uuid', name='uniq_attributes0uuid'),
        sa.Index('attributes_deployable_id_idx', 'deployable_id'),
        mysql_ENGINE='InnoDB',
        mysql_DEFAULT_CHARSET='UTF8'
    )
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/api.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/api.py
new file mode 100644
index 0000000..22233fb
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/api.py
@@ -0,0 +1,513 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""SQLAlchemy storage backend."""
+
+import threading
+import copy
+from oslo_db import api as oslo_db_api
+from oslo_db import exception as db_exc
+from oslo_db.sqlalchemy import enginefacade
+from oslo_db.sqlalchemy import utils as sqlalchemyutils
+from oslo_log import log
+from oslo_utils import strutils
+from oslo_utils import uuidutils
+from sqlalchemy.orm.exc import NoResultFound
+
+from cyborg.common import exception
+from cyborg.db.sqlalchemy import models
+from cyborg.common.i18n import _
+from cyborg.db import api
+
+
+_CONTEXT = threading.local()
+LOG = log.getLogger(__name__)
+
+
def get_backend():
    """The backend is this module itself."""
    # A fresh Connection per call is cheap: the class is stateless and all
    # sessions are created on demand via enginefacade.
    return Connection()
+
+
def _session_for_read():
    """Return a reader-session context manager bound to this thread."""
    return enginefacade.reader.using(_CONTEXT)
+
+
def _session_for_write():
    """Return a writer-session context manager bound to this thread."""
    return enginefacade.writer.using(_CONTEXT)
+
+
def model_query(context, model, *args, **kwargs):
    """Query helper for simpler session usage.

    :param context: Context of the query
    :param model: Model to query. Must be a subclass of ModelBase.
    :param args: Arguments to query. If None - model is used.

    Keyword arguments:

    :keyword project_only:
        If set to True, then will do query filter with context's project_id.
        if set to False or absent, then will not do query filter with context's
        project_id.
    :type project_only: bool
    """

    if kwargs.pop("project_only", False):
        kwargs["project_id"] = context.tenant

    with _session_for_read() as session:
        # NOTE(review): the Query object escapes the reader block and is
        # executed lazily by callers -- presumably enginefacade keeps the
        # session usable for the transaction scope; confirm.
        query = sqlalchemyutils.model_query(
            model, session, args, **kwargs)
        return query
+
+
def add_identity_filter(query, value):
    """Narrow *query* to a single record identified by *value*.

    An integer-like value filters on the ``id`` column, a UUID-like value
    filters on the ``uuid`` column; anything else is rejected.

    :param query: query to narrow.
    :param value: integer id or UUID string.
    :return: the filtered query.
    :raises: InvalidIdentity when *value* is neither id- nor UUID-like.
    """
    if strutils.is_int_like(value):
        filtered = query.filter_by(id=value)
    elif uuidutils.is_uuid_like(value):
        filtered = query.filter_by(uuid=value)
    else:
        raise exception.InvalidIdentity(identity=value)
    return filtered
+
+
def _paginate_query(context, model, limit, marker, sort_key, sort_dir, query):
    """Apply sorting and limit/marker pagination to *query* and run it.

    Always appends 'id' as a deterministic tie-breaker sort key.

    :raises: InvalidParameterValue when *sort_key* is not a sortable
        column of *model*.
    """
    sort_keys = ['id']
    if sort_key and sort_key not in sort_keys:
        sort_keys.insert(0, sort_key)
    try:
        query = sqlalchemyutils.paginate_query(query, model, limit, sort_keys,
                                               marker=marker,
                                               sort_dir=sort_dir)
    except db_exc.InvalidSortKey:
        raise exception.InvalidParameterValue(
            _('The sort_key value "%(key)s" is an invalid field for sorting')
            % {'key': sort_key})
    return query.all()
+
+
+class Connection(api.Connection):
+ """SqlAlchemy connection."""
+
    def __init__(self):
        # Stateless: each operation opens its own session via enginefacade.
        pass
+
+ def accelerator_create(self, context, values):
+ if not values.get('uuid'):
+ values['uuid'] = uuidutils.generate_uuid()
+
+ accelerator = models.Accelerator()
+ accelerator.update(values)
+
+ with _session_for_write() as session:
+ try:
+ session.add(accelerator)
+ session.flush()
+ except db_exc.DBDuplicateEntry:
+ raise exception.AcceleratorAlreadyExists(uuid=values['uuid'])
+ return accelerator
+
+ def accelerator_get(self, context, uuid):
+ query = model_query(context, models.Accelerator).filter_by(uuid=uuid)
+ try:
+ return query.one()
+ except NoResultFound:
+ raise exception.AcceleratorNotFound(uuid=uuid)
+
+ def accelerator_list(self, context, limit, marker, sort_key, sort_dir,
+ project_only):
+ query = model_query(context, models.Accelerator,
+ project_only = project_only)
+
+ return _paginate_query(context, models.Accelerator,limit,marker,
+ sort_key, sort_dir, query)
+
+ def accelerator_update(self, context, uuid, values):
+ if 'uuid' in values:
+ msg = _("Cannot overwrite UUID for existing Accelerator.")
+ raise exception.InvalidParameterValue(err = msg)
+
+ try:
+ return self._do_update_accelerator(context, uuid, values)
+ except db_exc.DBDuplicateEntry as e:
+ if 'name' in e.columns:
+ raise exception.DuplicateName(name=values['name'])
+
+
+ @oslo_db_api.retry_on_deadlock
+ def _do_update_accelerator(self, context, uuid, values):
+ with _session_for_write():
+ query = model_query(context, models.Port)
+ query = add_identity_filter(query, uuid)
+ try:
+ ref = query.with_lockmode('update').one()
+ except NoResultFound:
+ raise exception.PortNotFound(uuid=uuid)
+
+ ref.update(values)
+ return ref
+
    @oslo_db_api.retry_on_deadlock
    def accelerator_destory(self, context, uuid):
        """Delete the accelerator identified by *uuid* (id or uuid).

        NOTE(review): the name is misspelled ("destory") but is part of the
        public Connection interface; renaming it would break callers.

        :raises: AcceleratorNotFound if no row was deleted.
        """
        with _session_for_write():
            query = model_query(context, models.Accelerator)
            query = add_identity_filter(query, uuid)
            count = query.delete()
            if count != 1:
                raise exception.AcceleratorNotFound(uuid=uuid)
+
+
+
+ def port_create(self, context, values):
+ if not values.get('uuid'):
+ values['uuid'] = uuidutils.generate_uuid()
+ if not values.get('is_used'):
+ values['is_used'] = 0
+
+ port = models.Port()
+ port.update(values)
+
+ with _session_for_write() as session:
+ try:
+ session.add(port)
+ session.flush()
+ except db_exc.DBDuplicateEntry:
+ raise exception.PortAlreadyExists(uuid=values['uuid'])
+ return port
+
    def port_get(self, context, uuid):
        """Return the port with *uuid*.

        NOTE(review): this definition is dead code -- it is shadowed by the
        second ``port_get`` defined later in this class (which looks up by
        computer_node/phy_port_name/pci_slot), so UUID lookup is
        unreachable. The two should be merged or renamed.

        :raises: PortNotFound when no such row exists.
        """
        query = model_query(context, models.Port).filter_by(uuid=uuid)
        try:
            return query.one()
        except NoResultFound:
            raise exception.PortNotFound(uuid=uuid)
+
    def port_get(self, context, computer_node, phy_port_name, pci_slot):
        """Return the port on *computer_node* matching name and PCI slot.

        Returns None when no such port exists (unlike the UUID variant,
        which raises). NOTE(review): this redefinition shadows the earlier
        ``port_get(context, uuid)`` in this class.
        """
        query = model_query(context, models.Port).filter_by(computer_node=computer_node).\
            filter_by(phy_port_name=phy_port_name).filter_by(pci_slot=pci_slot)
        try:
            return query.one()
        except NoResultFound:
            return None
+
+ def port_list(self, context, limit, marker, sort_key, sort_dir):
+ query = model_query(context, models.Port)
+
+ return _paginate_query(context, models.Port, limit, marker,
+ sort_key, sort_dir, query)
+
+ def port_update(self, context, uuid, values):
+ if 'uuid' in values:
+ msg = _("Cannot overwrite UUID for existing Port.")
+ raise exception.InvalidParameterValue(err=msg)
+
+ try:
+ return self._do_update_port(context, uuid, values)
+ except db_exc.DBDuplicateEntry as e:
+ if 'name' in e.columns:
+ raise exception.PortDuplicateName(name=values['name'])
+
    @oslo_db_api.retry_on_deadlock
    def _do_update_port(self, context, uuid, values):
        """Apply *values* to one port row under a row lock.

        :raises: PortNotFound if no row matches *uuid* (id or uuid).
        """
        with _session_for_write():
            query = model_query(context, models.Port)
            query = add_identity_filter(query, uuid)
            try:
                # NOTE(review): Query.with_lockmode() is deprecated in newer
                # SQLAlchemy in favour of with_for_update().
                ref = query.with_lockmode('update').one()
            except NoResultFound:
                raise exception.PortNotFound(uuid=uuid)
            ref.update(values)
            return ref
+
    @oslo_db_api.retry_on_deadlock
    def port_destory(self, context, uuid):
        """Delete the port identified by *uuid* (id or uuid).

        NOTE(review): the name is misspelled ("destory") but matches the
        public interface (see accelerator_destory); kept for compatibility.

        :raises: PortNotFound if no row was deleted.
        """
        with _session_for_write():
            query = model_query(context, models.Port)
            query = add_identity_filter(query, uuid)
            count = query.delete()
            if count == 0:
                raise exception.PortNotFound(uuid=uuid)
+
+
+ #deployables table operations.
+ def deployable_create(self, context, values):
+ if not values.get('uuid'):
+ values['uuid'] = uuidutils.generate_uuid()
+
+ if values.get('id'):
+ values.pop('id', None)
+
+ deployable = models.Deployable()
+ deployable.update(values)
+
+ with _session_for_write() as session:
+ try:
+ session.add(deployable)
+ session.flush()
+ except db_exc.DBDuplicateEntry:
+ raise exception.DeployableAlreadyExists(uuid=values['uuid'])
+ return deployable
+
+ def deployable_get(self, context, uuid):
+ query = model_query(
+ context,
+ models.Deployable).filter_by(uuid=uuid)
+ try:
+ return query.one()
+ except NoResultFound:
+ raise exception.DeployableNotFound(uuid=uuid)
+
+ def deployable_get_by_host(self, context, host):
+ query = model_query(
+ context,
+ models.Deployable).filter_by(host=host)
+ return query.all()
+
+ def deployable_list(self, context):
+ query = model_query(context, models.Deployable)
+ return query.all()
+
+ def deployable_update(self, context, uuid, values):
+ if 'uuid' in values:
+ msg = _("Cannot overwrite UUID for an existing Deployable.")
+ raise exception.InvalidParameterValue(err=msg)
+
+ try:
+ return self._do_update_deployable(context, uuid, values)
+ except db_exc.DBDuplicateEntry as e:
+ if 'name' in e.columns:
+ raise exception.DuplicateDeployableName(name=values['name'])
+
    @oslo_db_api.retry_on_deadlock
    def _do_update_deployable(self, context, uuid, values):
        """Apply *values* to one deployable row under a row lock.

        Unlike the other _do_update_* helpers this filters strictly by
        UUID (integer ids are not accepted).

        :raises: DeployableNotFound if no row matches *uuid*.
        """
        with _session_for_write():
            query = model_query(context, models.Deployable)
            # query = add_identity_filter(query, uuid)
            query = query.filter_by(uuid=uuid)
            try:
                ref = query.with_lockmode('update').one()
            except NoResultFound:
                raise exception.DeployableNotFound(uuid=uuid)

            ref.update(values)
            return ref
+
    @oslo_db_api.retry_on_deadlock
    def deployable_delete(self, context, uuid):
        """Delete the deployable identified by *uuid* (id or uuid).

        :raises: DeployableNotFound if no row was deleted.
        """
        with _session_for_write():
            query = model_query(context, models.Deployable)
            query = add_identity_filter(query, uuid)
            # Clear root_uuid first -- presumably to satisfy the
            # self-referential FK before the delete; confirm against the
            # deployables schema.
            query.update({'root_uuid': None})
            count = query.delete()
            if count != 1:
                raise exception.DeployableNotFound(uuid=uuid)
+
    def deployable_get_by_filters(self, context,
                                  filters, sort_key='created_at',
                                  sort_dir='desc', limit=None,
                                  marker=None, join_columns=None):
        """Return list of deployables matching all filters sorted by
        the sort_key. See deployable_get_by_filters_sort for
        more information.
        """
        # Thin convenience wrapper: wraps the single sort_key/sort_dir pair
        # into the list form the underlying method expects.
        return self.deployable_get_by_filters_sort(context, filters,
                                                   limit=limit, marker=marker,
                                                   join_columns=join_columns,
                                                   sort_keys=[sort_key],
                                                   sort_dirs=[sort_dir])
+
    def _exact_deployable_filter(self, query, filters, legal_keys):
        """Applies exact match filtering to a deployable query.

        Returns the updated query, or None when a list-valued filter is
        empty (nothing can match). Modifies the *filters* argument to
        remove filters consumed.

        :param query: query to apply filters to
        :param filters: dictionary of filters; values that are lists,
                        tuples, sets, or frozensets cause an 'IN' test to
                        be performed, while exact matching ('==' operator)
                        is used for other values
        :param legal_keys: list of keys to apply exact filtering to
        """

        filter_dict = {}
        model = models.Deployable
        # Walk through all the keys
        for key in legal_keys:
            # Skip ones we're not filtering on
            if key not in filters:
                continue

            # OK, filtering on this key; what value do we search for?
            value = filters.pop(key)

            if isinstance(value, (list, tuple, set, frozenset)):
                if not value:
                    # Empty IN-list can never match: signal "no results"
                    # with the None sentinel the caller checks for.
                    return None
                # Looking for values in a list; apply to query directly
                column_attr = getattr(model, key)
                query = query.filter(column_attr.in_(value))
            else:
                filter_dict[key] = value
        # Apply simple exact matches
        if filter_dict:
            query = query.filter(*[getattr(models.Deployable, k) == v
                                   for k, v in filter_dict.items()])
        return query
+
    def deployable_get_by_filters_sort(self, context, filters, limit=None,
                                       marker=None, join_columns=None,
                                       sort_keys=None, sort_dirs=None):
        """Return deployables that match all filters sorted by the given
        keys.

        NOTE(review): sort_keys/sort_dirs are validated via
        process_sort_params but never applied to the query, and limit
        (other than 0), marker and join_columns are ignored -- the full
        filtered result set is returned unsorted.
        """

        if limit == 0:
            return []

        sort_keys, sort_dirs = self.process_sort_params(sort_keys,
                                                        sort_dirs,
                                                        default_dir='desc')
        query_prefix = model_query(context, models.Deployable)
        # Deep-copy so _exact_deployable_filter's destructive pops do not
        # mutate the caller's dict.
        filters = copy.deepcopy(filters)

        exact_match_filter_names = ['uuid', 'name',
                                    'parent_uuid', 'root_uuid',
                                    'pcie_address', 'host',
                                    'board', 'vendor', 'version',
                                    'type', 'assignable', 'instance_uuid',
                                    'availability', 'accelerator_id']

        # Filter the query
        query_prefix = self._exact_deployable_filter(query_prefix,
                                                     filters,
                                                     exact_match_filter_names)
        if query_prefix is None:
            # An empty IN-list filter means nothing can match.
            return []
        deployables = query_prefix.all()
        return deployables
+
+ def attribute_create(self, context, key, value):
+ update_fields = {'key': key, 'value': value}
+ update_fields['uuid'] = uuidutils.generate_uuid()
+
+ attribute = models.Attribute()
+ attribute.update(update_fields)
+
+ with _session_for_write() as session:
+ try:
+ session.add(attribute)
+ session.flush()
+ except db_exc.DBDuplicateEntry:
+ raise exception.AttributeAlreadyExists(
+ uuid=update_fields['uuid'])
+ return attribute
+
+ def attribute_get(self, context, uuid):
+ query = model_query(
+ context,
+ models.Attribute).filter_by(uuid=uuid)
+ try:
+ return query.one()
+ except NoResultFound:
+ raise exception.AttributeNotFound(uuid=uuid)
+
+ def attribute_get_by_deployable_uuid(self, context, deployable_uuid):
+ query = model_query(
+ context,
+ models.Attribute).filter_by(deployable_uuid=deployable_uuid)
+ try:
+ return query.all()
+ except NoResultFound:
+ raise exception.AttributeNotFound(uuid=uuid)
+
    def attribute_update(self, context, uuid, key, value):
        """Set *key*/*value* on the attribute identified by *uuid*."""
        return self._do_update_attribute(context, uuid, key, value)
+
    @oslo_db_api.retry_on_deadlock
    def _do_update_attribute(self, context, uuid, key, value):
        """Apply a new key/value pair to one attribute row under a row lock.

        :raises: AttributeNotFound if no row matches *uuid* (id or uuid).
        """
        update_fields = {'key': key, 'value': value}
        with _session_for_write():
            query = model_query(context, models.Attribute)
            query = add_identity_filter(query, uuid)
            try:
                ref = query.with_lockmode('update').one()
            except NoResultFound:
                raise exception.AttributeNotFound(uuid=uuid)

            ref.update(update_fields)
            return ref
+
+ def attribute_delete(self, context, uuid):
+ with _session_for_write():
+ query = model_query(context, models.Attribute)
+ query = add_identity_filter(query, uuid)
+ count = query.delete()
+ if count != 1:
+ raise exception.AttributeNotFound(uuid=uuid)
+
+ def process_sort_params(self, sort_keys, sort_dirs,
+ default_keys=['created_at', 'id'],
+ default_dir='asc'):
+
+ # Determine direction to use for when adding default keys
+ if sort_dirs and len(sort_dirs) != 0:
+ default_dir_value = sort_dirs[0]
+ else:
+ default_dir_value = default_dir
+
+ # Create list of keys (do not modify the input list)
+ if sort_keys:
+ result_keys = list(sort_keys)
+ else:
+ result_keys = []
+
+ # If a list of directions is not provided,
+ # use the default sort direction for all provided keys
+ if sort_dirs:
+ result_dirs = []
+ # Verify sort direction
+ for sort_dir in sort_dirs:
+ if sort_dir not in ('asc', 'desc'):
+ msg = _("Unknown sort direction, must be 'desc' or 'asc'")
+ raise exception.InvalidInput(reason=msg)
+ result_dirs.append(sort_dir)
+ else:
+ result_dirs = [default_dir_value for _sort_key in result_keys]
+
+ # Ensure that the key and direction length match
+ while len(result_dirs) < len(result_keys):
+ result_dirs.append(default_dir_value)
+ # Unless more direction are specified, which is an error
+ if len(result_dirs) > len(result_keys):
+ msg = _("Sort direction size exceeds sort key size")
+ raise exception.InvalidInput(reason=msg)
+
+ # Ensure defaults are included
+ for key in default_keys:
+ if key not in result_keys:
+ result_keys.append(key)
+ result_dirs.append(default_dir_value)
+
+ return result_keys, result_dirs
+
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/migration.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/migration.py
new file mode 100644
index 0000000..d805f77
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/migration.py
@@ -0,0 +1,108 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import alembic
+from alembic import config as alembic_config
+import alembic.migration as alembic_migration
+from oslo_db import exception as db_exc
+from oslo_db.sqlalchemy import enginefacade
+
+from cyborg.db.sqlalchemy import models
+
+
def _alembic_config():
    """Load this package's alembic.ini and return an alembic Config."""
    ini_path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
    return alembic_config.Config(ini_path)
+
+
def version(config=None, engine=None):
    """Current database version.

    :param config: unused; kept for interface symmetry with the other
        commands in this module.
    :param engine: engine to inspect; defaults to oslo.db's configured one.
    :returns: Database version
    :rtype: string
    """
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()
    with engine.connect() as conn:
        context = alembic_migration.MigrationContext.configure(conn)
        return context.get_current_revision()
+
+
def upgrade(revision, config=None):
    """Used for upgrading database.

    :param revision: Desired database revision; falsy means 'head'.
    :type revision: string
    :param config: alembic Config; defaults to this package's alembic.ini.
    """
    revision = revision or 'head'
    config = config or _alembic_config()
    # NOTE(review): relies on 'alembic.command' being reachable as an
    # attribute after 'import alembic' -- presumably pulled in by the
    # config import; confirm, or use 'from alembic import command'.
    alembic.command.upgrade(config, revision)
+
+
def create_schema(config=None, engine=None):
    """Create database schema from models description.

    Can be used for initial installation instead of upgrade('head').
    The database is then stamped 'head' so future upgrades start from
    the right revision.

    :raises: DBMigrationError if the schema is already versioned.
    """
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()

    if version(engine=engine) is not None:
        raise db_exc.DBMigrationError("DB schema is already under version"
                                      " control. Use upgrade() instead")

    models.Base.metadata.create_all(engine)
    stamp('head', config=config)
+
+
def downgrade(revision, config=None):
    """Used for downgrading database.

    :param revision: Desired database revision; falsy means 'base'
        (i.e. revert every migration).
    :type revision: string
    :param config: alembic Config; defaults to this package's alembic.ini.
    """
    revision = revision or 'base'
    config = config or _alembic_config()
    return alembic.command.downgrade(config, revision)
+
+
def stamp(revision, config=None):
    """Stamps database with provided revision.

    Don't run any migrations.

    :param revision: Should match one from repository or head - to stamp
                     database with most recent revision
    :type revision: string
    :param config: alembic Config; defaults to this package's alembic.ini.
    """
    config = config or _alembic_config()
    return alembic.command.stamp(config, revision=revision)
+
+
def revision(message=None, autogenerate=False, config=None):
    """Creates template for migration.

    :param message: Text that will be used for migration title
    :type message: string
    :param autogenerate: If True - generates diff based on current database
                         state
    :type autogenerate: bool
    :param config: alembic Config; defaults to this package's alembic.ini.
    """
    config = config or _alembic_config()
    return alembic.command.revision(config, message=message,
                                    autogenerate=autogenerate)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/models.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/models.py
new file mode 100644
index 0000000..5a301c4
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/db/sqlalchemy/models.py
@@ -0,0 +1,132 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""SQLAlchemy models for accelerator service."""
+
+from oslo_db import options as db_options
+from oslo_db.sqlalchemy import models
+import six.moves.urllib.parse as urlparse
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import Column, String, Integer, Boolean, ForeignKey, Index
+from sqlalchemy import schema
+from sqlalchemy import Text
+
+from cyborg.common import paths
+from cyborg.conf import CONF
+
+
+_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('cyborg.sqlite')
+db_options.set_defaults(CONF, connection=_DEFAULT_SQL_CONNECTION)
+
+
+def table_args():
+ engine_name = urlparse.urlparse(CONF.database.connection).scheme
+ if engine_name == 'mysql':
+ return {'mysql_engine': CONF.database.mysql_engine,
+ 'mysql_charset': "utf8"}
+ return None
+
+
+class CyborgBase(models.TimestampMixin, models.ModelBase):
+ metadata = None
+
+ def as_dict(self):
+ d = {}
+ for c in self.__table__.columns:
+ d[c.name] = self[c.name]
+ return d
+
+
+Base = declarative_base(cls=CyborgBase)
+
+
+class Accelerator(Base):
+    """Represents the accelerators."""
+
+    __tablename__ = 'accelerators'
+    __table_args__ = (
+        schema.UniqueConstraint('uuid', name='uniq_accelerators0uuid'),
+        table_args()
+    )
+
+    id = Column(Integer, primary_key=True)
+    uuid = Column(String(36), nullable=False)
+    name = Column(String(255), nullable=False)
+    description = Column(String(255), nullable=True)
+    project_id = Column(String(36), nullable=True)
+    user_id = Column(String(36), nullable=True)
+    # Type of the physical device, e.g. GPU, FPGA, ...
+    device_type = Column(String(255), nullable=False)
+    # Usage of the accelerator, e.g. Crypto
+    acc_type = Column(String(255), nullable=False)
+    # Specific capability, e.g. AES
+    acc_capability = Column(String(255), nullable=False)
+    # Vendor identifier, e.g. NVIDIA, XILINX, INTEL
+    vendor_id = Column(String(255), nullable=False)
+    # Product identifier, e.g. P100
+    product_id = Column(String(255), nullable=False)
+    # Integer flag: whether a remote accelerator is supported
+    remotable = Column(Integer, nullable=False)
+
+
+class Port(Base):
+    """Represents the ports which physical cards provided."""
+
+    __tablename__ = 'ports'
+    __table_args__ = (
+        schema.UniqueConstraint('uuid', name='uniq_ports0uuid'),
+        table_args()
+    )
+
+    id = Column(Integer, primary_key=True)
+    uuid = Column(String(36), nullable=False)
+    # Compute-node identifier (36 chars, UUID-sized); the historical
+    # column name 'computer_node' is kept as-is.
+    computer_node = Column(String(36), nullable=False)
+    phy_port_name = Column(String(255), nullable=False)
+    pci_slot = Column(String(255), nullable=False)
+    vendor_id = Column(String(255), nullable=False)
+    product_id = Column(String(255), nullable=False)
+    # Integer flag: whether the port is currently in use
+    is_used = Column(Integer, nullable=False)
+    accelerator_id = Column(String(36), nullable=True)
+    bind_instance_id = Column(String(36), nullable=True)
+    bind_port_id = Column(String(36), nullable=True)
+    device_type = Column(String(255), nullable=True)
+
+
+class Deployable(Base):
+ """Represents the deployables which physical cards provided."""
+
+ __tablename__ = 'deployables'
+ __table_args__ = (
+ schema.UniqueConstraint('uuid', name='uniq_deployables0uuid'),
+ Index('deployables_parent_uuid_idx', 'parent_uuid'),
+ Index('deployables_root_uuid_idx', 'root_uuid'),
+ # Index('deployables_accelerator_id_idx', 'accelerator_id'),
+ table_args()
+ )
+
+ id = Column(Integer, primary_key=True)
+ uuid = Column(String(36), nullable=False)
+ name = Column(String(36), nullable=False)
+ parent_uuid = Column(String(36),
+ ForeignKey('deployables.uuid'),nullable=True)
+ root_uuid = Column(String(36),
+ ForeignKey('deployables.uuid'), nullable=True)
+ pcie_address = Column(String(255), nullable=False)
+ host = Column(String(255), nullable=False)
+ board = Column(String(255), nullable=False)
+ vendor = Column(String(255), nullable=False)
+ version = Column(String(255), nullable=False)
+ type = Column(String(255), nullable=False)
+ assignable = Column(Boolean, nullable=False)
+ instance_uuid = Column(String(36), nullable=True)
+ availability = Column(String(255), nullable=False)
+ # accelerator_id = Column(Integer,
+ # ForeignKey('accelerators.id', ondelete="CASCADE"),
+ # nullable=False)
+
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/__init__.py
new file mode 100644
index 0000000..a313564
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/__init__.py
@@ -0,0 +1,30 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# NOTE(comstud): You may scratch your head as you see code that imports
+# this module and then accesses attributes for objects such as Node,
+# etc, yet you do not see these attributes in here. Never fear, there is
+# a little bit of magic. When objects are registered, an attribute is set
+# on this module automatically, pointing to the newest/latest version of
+# the object.
+
+
+def register_all():
+ # NOTE(danms): You must make sure your object gets imported in this
+ # function in order for it to be registered by services that may
+ # need to receive it via RPC.
+ __import__('cyborg.objects.accelerator')
+ __import__('cyborg.objects.port')
+ __import__('cyborg.objects.deployable')
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/accelerator.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/accelerator.py
new file mode 100644
index 0000000..a19774b
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/accelerator.py
@@ -0,0 +1,84 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_log import log as logging
+from oslo_versionedobjects import base as object_base
+
+from cyborg.db import api as dbapi
+from cyborg.objects import base
+from cyborg.objects import fields as object_fields
+
+
+LOG = logging.getLogger(__name__)
+
+
+@base.CyborgObjectRegistry.register
+class Accelerator(base.CyborgObject, object_base.VersionedObjectDictCompat):
+ # Version 1.0: Initial version
+ VERSION = '1.0'
+
+ dbapi = dbapi.get_instance()
+
+ fields = {
+ 'id': object_fields.IntegerField(nullable=False),
+ 'uuid': object_fields.UUIDField(nullable=False),
+ 'name': object_fields.StringField(nullable=False),
+ 'description': object_fields.StringField(nullable=True),
+ 'project_id': object_fields.UUIDField(nullable=True),
+ 'user_id': object_fields.UUIDField(nullable=True),
+ 'device_type': object_fields.StringField(nullable=False),
+ # The type of the accelerator device, e.g GPU, FPGA, ...
+ 'acc_type': object_fields.StringField(nullable=False),
+ # acc_type defines the usage of the accelerator, e.g Crypto
+ 'acc_capability': object_fields.StringField(nullable=False),
+ # acc_capability defines the specific capability, e.g AES
+ 'vendor_id': object_fields.StringField(nullable=False),
+ # vendor_id refers to ids like NVIDIA, XILINX, INTEL,...
+ 'product_id': object_fields.StringField(nullable=False),
+ # product_id refers to ids like P100
+ 'remotable': object_fields.IntegerField(nullable=False),
+ # remotable ids if remote accelerator is supported
+ }
+
+ def create(self, context):
+ """Create an Accelerator record in the DB."""
+ values = self.obj_get_changes()
+ db_acc= self.dbapi.accelerator_create(context, values)
+ self._from_db_object(self, db_acc)
+
+ @classmethod
+ def get(cls, context, uuid):
+ """Find a DB Accelerator and return an Ojb Accelerator."""
+ db_acc = cls.dbapi.accelerator_get(context, uuid)
+ obj_acc = cls._from_db_object(cls(context), db_acc)
+ return obj_acc
+
+ @classmethod
+ def list(cls, context, limit, marker, sort_key, sort_dir, project_only):
+ """Return a list of Accelerator objects."""
+ db_accs = cls.dbapi.accelerator_list(context, limit, marker, sort_key,
+ sort_dir, project_only)
+ return cls._from_db_object_list(context, db_accs)
+
+ def save(self, context):
+ """Update an Accelerator record in the DB."""
+ updates = self.obj_get_changes()
+ db_acc = self.dbapi.accelerator_update(context, self.uuid, updates)
+ self._from_db_object(self, db_acc)
+
+ def destory(self, context):
+ """Delete the Accelerator record from the DB."""
+ self.dbapi.accelerator_destory(context, self.uuid)
+ self.obj_reset_changes()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/attribute.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/attribute.py
new file mode 100644
index 0000000..460424b
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/attribute.py
@@ -0,0 +1,84 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_log import log as logging
+from oslo_versionedobjects import base as object_base
+
+from cyborg.common import exception
+from cyborg.db import api as dbapi
+from cyborg.objects import base
+from cyborg.objects import fields as object_fields
+
+
+LOG = logging.getLogger(__name__)
+
+
+@base.CyborgObjectRegistry.register
+class Attribute(base.CyborgObject, object_base.VersionedObjectDictCompat):
+ # Version 1.0: Initial version
+ VERSION = '1.0'
+
+ dbapi = dbapi.get_instance()
+
+ fields = {
+ 'id': object_fields.IntegerField(nullable=False),
+ 'uuid': object_fields.UUIDField(nullable=False),
+ 'deployable_id': object_fields.IntegerField(nullable=False),
+ 'key': object_fields.StringField(nullable=False),
+ 'value': object_fields.StringField(nullable=False)
+ }
+
+ def create(self, context):
+ """Create an attribute record in the DB."""
+ if self.deployable_id is None:
+ raise exception.AttributeInvalid()
+
+ values = self.obj_get_changes()
+ db_attr = self.dbapi.attribute_create(context,
+ self.key,
+ self.value)
+ self._from_db_object(self, db_attr)
+
+ @classmethod
+ def get(cls, context, uuid):
+ """Find a DB attribute and return an Obj attribute."""
+ db_attr = cls.dbapi.attribute_get(context, uuid)
+ obj_attr = cls._from_db_object(cls(context), db_attr)
+ return obj_attr
+
+ @classmethod
+ def attribute_get_by_deployable_uuid(cls, context, deployable_uuid):
+ """Get an attribute by deployable uuid."""
+ db_attr = cls.dbapi.attribute_get_by_deployable_uuid(context,
+ deployable_uuid)
+ return cls._from_db_object_list(db_attr, context)
+
+ def save(self, context):
+ """Update an attribute record in the DB."""
+ updates = self.obj_get_changes()
+ db_attr = self.dbapi.attribute_update(context,
+ self.uuid,
+ self.key,
+ self.value)
+ self._from_db_object(self, db_attr)
+
+ def destroy(self, context):
+ """Delete an attribute from the DB."""
+ self.dbapi.attribute_delete(context, self.uuid)
+ self.obj_reset_changes()
+
+ def set_key_value_pair(self, set_key, set_value):
+ self.key = set_key
+ self.value = set_value
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/base.py
new file mode 100644
index 0000000..49370f9
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/base.py
@@ -0,0 +1,176 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Cyborg common internal object model"""
+
+from oslo_utils import versionutils
+from oslo_versionedobjects import base as object_base
+
+from cyborg import objects
+from cyborg.objects import fields as object_fields
+import netaddr
+
+
+class CyborgObjectRegistry(object_base.VersionedObjectRegistry):
+    """Registry that mirrors registered classes onto the cyborg.objects
+    module, always keeping the highest-versioned implementation there.
+    """
+
+    def registration_hook(self, cls, index):
+        # NOTE(jroll): blatantly stolen from nova
+        # NOTE(danms): This is called when an object is registered,
+        # and is responsible for maintaining cyborg.objects.$OBJECT
+        # as the highest-versioned implementation of a given object.
+        version = versionutils.convert_version_to_tuple(cls.VERSION)
+        if not hasattr(objects, cls.obj_name()):
+            setattr(objects, cls.obj_name(), cls)
+        else:
+            cur_version = versionutils.convert_version_to_tuple(
+                getattr(objects, cls.obj_name()).VERSION)
+            # Only replace the module attribute with a same-or-newer version.
+            if version >= cur_version:
+                setattr(objects, cls.obj_name(), cls)
+
+
+class CyborgObject(object_base.VersionedObject):
+    """Base class and object factory.
+
+    This forms the base of all objects that can be remoted or instantiated
+    via RPC. Simply defining a class that inherits from this base class
+    will make it remotely instantiatable. Objects should implement the
+    necessary "get" classmethod routines as well as "save" object methods
+    as appropriate.
+    """
+
+    OBJ_SERIAL_NAMESPACE = 'cyborg_object'
+    OBJ_PROJECT_NAMESPACE = 'cyborg'
+
+    fields = {
+        'created_at': object_fields.DateTimeField(nullable=True),
+        'updated_at': object_fields.DateTimeField(nullable=True),
+    }
+
+    def as_dict(self):
+        """Return a dict of only the fields that are currently set."""
+        return dict((k, getattr(self, k))
+                    for k in self.fields
+                    if hasattr(self, k))
+
+    @staticmethod
+    def _from_db_object(obj, db_obj):
+        """Converts a database entity to a formal object.
+
+        :param obj: An object of the class.
+        :param db_obj: A DB model of the object
+        :return: The object of the class with the database entity added
+        """
+
+        # Item assignment works via VersionedObjectDictCompat on subclasses.
+        for field in obj.fields:
+            obj[field] = db_obj[field]
+
+        # Values came straight from the DB, so nothing counts as changed.
+        obj.obj_reset_changes()
+        return obj
+
+    @classmethod
+    def _from_db_object_list(cls, context, db_objs):
+        """Converts a list of database entities to a list of formal objects."""
+        objs = []
+        for db_obj in db_objs:
+            objs.append(cls._from_db_object(cls(context), db_obj))
+        return objs
+
+class CyborgObjectSerializer(object_base.VersionedObjectSerializer):
+    """Serializer that hydrates primitives back into CyborgObject types."""
+
+    # Base class to use for object hydration
+    OBJ_BASE_CLASS = CyborgObject
+
+
+CyborgObjectDictCompat = object_base.VersionedObjectDictCompat
+
+
+class CyborgPersistentObject(object):
+    """Mixin class for Persistent objects.
+    This adds the fields that we use in common for most persistent objects.
+    """
+    # NOTE(review): deleted/deleted_at presumably support soft deletion —
+    # confirm against the DB layer before relying on that semantic.
+    fields = {
+        'created_at': object_fields.DateTimeField(nullable=True),
+        'updated_at': object_fields.DateTimeField(nullable=True),
+        'deleted_at': object_fields.DateTimeField(nullable=True),
+        'deleted': object_fields.BooleanField(default=False),
+    }
+
+
+class ObjectListBase(object_base.ObjectListBase):
+
+ @classmethod
+ def _obj_primitive_key(cls, field):
+ return 'cyborg_object.%s' % field
+
+ @classmethod
+ def _obj_primitive_field(cls, primitive, field,
+ default=object_fields.UnspecifiedDefault):
+ key = cls._obj_primitive_key(field)
+ if default == object_fields.UnspecifiedDefault:
+ return primitive[key]
+ else:
+ return primitive.get(key, default)
+
+
+def obj_to_primitive(obj):
+ """Recursively turn an object into a python primitive.
+ A CyborgObject becomes a dict, and anything that implements ObjectListBase
+ becomes a list.
+ """
+ if isinstance(obj, ObjectListBase):
+ return [obj_to_primitive(x) for x in obj]
+ elif isinstance(obj, CyborgObject):
+ result = {}
+ for key in obj.obj_fields:
+ if obj.obj_attr_is_set(key) or key in obj.obj_extra_fields:
+ result[key] = obj_to_primitive(getattr(obj, key))
+ return result
+ elif isinstance(obj, netaddr.IPAddress):
+ return str(obj)
+ elif isinstance(obj, netaddr.IPNetwork):
+ return str(obj)
+ else:
+ return obj
+
+
+
+def obj_equal_prims(obj_1, obj_2, ignore=None):
+ """Compare two primitives for equivalence ignoring some keys.
+ This operation tests the primitives of two objects for equivalence.
+ Object primitives may contain a list identifying fields that have been
+ changed - this is ignored in the comparison. The ignore parameter lists
+ any other keys to be ignored.
+ :param:obj1: The first object in the comparison
+ :param:obj2: The second object in the comparison
+ :param:ignore: A list of fields to ignore
+ :returns: True if the primitives are equal ignoring changes
+ and specified fields, otherwise False.
+ """
+
+ def _strip(prim, keys):
+ if isinstance(prim, dict):
+ for k in keys:
+ prim.pop(k, None)
+ for v in prim.values():
+ _strip(v, keys)
+ if isinstance(prim, list):
+ for v in prim:
+ _strip(v, keys)
+ return prim
+
+ if ignore is not None:
+ keys = ['cyborg_object.changes'] + ignore
+ else:
+ keys = ['cyborg_object.changes']
+ prim_1 = _strip(obj_1.obj_to_primitive(), keys)
+ prim_2 = _strip(obj_2.obj_to_primitive(), keys)
+ return prim_1 == prim_2
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/deployable.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/deployable.py
new file mode 100644
index 0000000..3f152c6
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/deployable.py
@@ -0,0 +1,139 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_log import log as logging
+from oslo_versionedobjects import base as object_base
+
+from cyborg.common import exception
+from cyborg.db import api as dbapi
+from cyborg.objects import base
+from cyborg.objects import fields as object_fields
+# from cyborg.objects.attribute import Attribute
+
+
+LOG = logging.getLogger(__name__)
+
+
+@base.CyborgObjectRegistry.register
+class Deployable(base.CyborgObject, object_base.VersionedObjectDictCompat):
+ # Version 1.0: Initial version
+ VERSION = '1.0'
+
+ dbapi = dbapi.get_instance()
+ attributes_list = []
+
+ fields = {
+ 'id': object_fields.IntegerField(nullable=False),
+ 'uuid': object_fields.UUIDField(nullable=False),
+ 'name': object_fields.StringField(nullable=False),
+ 'parent_uuid': object_fields.UUIDField(nullable=True),
+ # parent_uuid refers to the id of the VF's parent node
+ 'root_uuid': object_fields.UUIDField(nullable=True),
+ # root_uuid refers to the id of the VF's root which has to be a PF
+ 'pcie_address': object_fields.StringField(nullable=False),
+ 'host': object_fields.StringField(nullable=False),
+ 'board': object_fields.StringField(nullable=False),
+ # board refers to a specific acc board type, e.g P100 GPU card
+ 'vendor': object_fields.StringField(nullable=False),
+ 'version': object_fields.StringField(nullable=False),
+ 'type': object_fields.StringField(nullable=False),
+ # similar to the acc_type in accelerator.py
+ 'assignable': object_fields.BooleanField(nullable=False),
+ # identify if a instance is in use
+ 'instance_uuid': object_fields.UUIDField(nullable=True),
+ # The id of the virtualized accelerator instance
+ 'availability': object_fields.StringField(nullable=False),
+ # identify the state of acc, e.g released/claimed/...
+ # 'accelerator_id': object_fields.IntegerField(nullable=False)
+ # Foreign key constrain to reference accelerator table.
+ }
+
+ def _get_parent_root_uuid(self):
+ obj_dep = Deployable.get(None, self.parent_uuid)
+ return obj_dep.root_uuid
+
+ def create(self, context):
+ """Create a Deployable record in the DB."""
+ if 'uuid' not in self:
+ raise exception.ObjectActionError(action='create',
+ reason='uuid is required')
+
+ if self.parent_uuid is None:
+ self.root_uuid = self.uuid
+ else:
+ self.root_uuid = self._get_parent_root_uuid()
+
+ values = self.obj_get_changes()
+ db_dep = self.dbapi.deployable_create(context, values)
+ self._from_db_object(self, db_dep)
+
+ @classmethod
+ def get(cls, context, uuid):
+ """Find a DB Deployable and return an Obj Deployable."""
+ db_dep = cls.dbapi.deployable_get(context, uuid)
+ obj_dep = cls._from_db_object(cls(context), db_dep)
+ return obj_dep
+
+ @classmethod
+ def get_by_host(cls, context, host):
+ """Get a Deployable by host."""
+ db_deps = cls.dbapi.deployable_get_by_host(context, host)
+ return cls._from_db_object_list(context, db_deps)
+
+ @classmethod
+ def list(cls, context):
+ """Return a list of Deployable objects."""
+ db_deps = cls.dbapi.deployable_list(context)
+ return cls._from_db_object_list(context, db_deps)
+
+ def save(self, context):
+ """Update a Deployable record in the DB."""
+ updates = self.obj_get_changes()
+ db_dep = self.dbapi.deployable_update(context, self.uuid, updates)
+ self._from_db_object(self, db_dep)
+
+ def destroy(self, context):
+ """Delete a Deployable from the DB."""
+ self.dbapi.deployable_delete(context, self.uuid)
+ self.obj_reset_changes()
+
+ def add_attribute(self, attribute):
+ """add a attribute object to the attribute_list.
+ If the attribute already exists, it will update the value,
+ otherwise, the vf will be appended to the list.
+ """
+ if not isinstance(attribute, Attribute):
+ raise exception.InvalidDeployType()
+ for exist_attr in self.attributes_list:
+ if base.obj_equal_prims(vf, exist_attr):
+ LOG.warning("The attribute already exists.")
+ return None
+
+ @classmethod
+ def get_by_filter(cls, context,
+ filters, sort_key='created_at',
+ sort_dir='desc', limit=None,
+ marker=None, join=None):
+ obj_dpl_list = []
+ db_dpl_list = cls.dbapi.deployable_get_by_filters(context, filters,
+ sort_key=sort_key,
+ sort_dir=sort_dir,
+ limit=limit,
+ marker=marker,
+ join_columns=join)
+ for db_dpl in db_dpl_list:
+ obj_dpl = cls._from_db_object(cls(context), db_dpl)
+ obj_dpl_list.append(obj_dpl)
+ return obj_dpl_list
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/fields.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/fields.py
new file mode 100644
index 0000000..52d3349
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/fields.py
@@ -0,0 +1,30 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_versionedobjects import fields as object_fields
+
+
+# Re-export the oslo.versionedobjects field types under cyborg's own
+# namespace so object modules import them from one place.
+IntegerField = object_fields.IntegerField
+UUIDField = object_fields.UUIDField
+StringField = object_fields.StringField
+DateTimeField = object_fields.DateTimeField
+# added for port object
+BooleanField = object_fields.BooleanField
+ObjectField = object_fields.ObjectField
+ListOfObjectsField = object_fields.ListOfObjectsField
+ListOfStringsField = object_fields.ListOfStringsField
+IPAddressField = object_fields.IPAddressField
+IPNetworkField = object_fields.IPNetworkField
+# Sentinel used for "no default supplied" comparisons (compare with 'is').
+UnspecifiedDefault = object_fields.UnspecifiedDefault
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/physical_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/physical_function.py
new file mode 100644
index 0000000..4445565
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/physical_function.py
@@ -0,0 +1,137 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+from oslo_log import log as logging
+from oslo_versionedobjects import base as object_base
+
+from cyborg.common import exception
+from cyborg.db import api as dbapi
+from cyborg.objects import base
+from cyborg.objects import fields as object_fields
+from cyborg.objects.deployable import Deployable
+from cyborg.objects.virtual_function import VirtualFunction
+
+LOG = logging.getLogger(__name__)
+
+
+@base.CyborgObjectRegistry.register
+class PhysicalFunction(Deployable):
+ # Version 1.0: Initial version
+ VERSION = '1.0'
+ virtual_function_list = []
+
+ def create(self, context):
+ # To ensure the creating type is PF
+ if self.type != 'pf':
+ raise exception.InvalidDeployType()
+ super(PhysicalFunction, self).create(context)
+
+ def save(self, context):
+ """In addition to save the pf, it should also save the
+ vfs associated with this pf
+ """
+ # To ensure the saving type is PF
+ if self.type != 'pf':
+ raise exception.InvalidDeployType()
+
+ for exist_vf in self.virtual_function_list:
+ exist_vf.save(context)
+ super(PhysicalFunction, self).save(context)
+
+ def add_vf(self, vf):
+ """add a vf object to the virtual_function_list.
+ If the vf already exists, it will ignore,
+ otherwise, the vf will be appended to the list
+ """
+ if not isinstance(vf, VirtualFunction) or vf.type != 'vf':
+ raise exception.InvalidDeployType()
+ for exist_vf in self.virtual_function_list:
+ if base.obj_equal_prims(vf, exist_vf):
+ LOG.warning("The vf already exists")
+ return None
+ vf.parent_uuid = self.uuid
+ vf.root_uuid = self.root_uuid
+ vf_copy = copy.deepcopy(vf)
+ self.virtual_function_list.append(vf_copy)
+
+ def delete_vf(self, context, vf):
+ """remove a vf from the virtual_function_list
+ if the vf does not exist, ignore it
+ """
+ for idx, exist_vf in self.virtual_function_list:
+ if base.obj_equal_prims(vf, exist_vf):
+ removed_vf = self.virtual_function_list.pop(idx)
+ removed_vf.destroy(context)
+ return
+ LOG.warning("The removing vf does not exist!")
+
+ def destroy(self, context):
+ """Delete a the pf from the DB."""
+ del self.virtual_function_list[:]
+ super(PhysicalFunction, self).destroy(context)
+
+ @classmethod
+ def get(cls, context, uuid):
+ """Find a DB Physical Function and return an Obj Physical Function.
+ In addition, it will also finds all the Virtual Functions associated
+ with this Physical Function and place them in virtual_function_list
+ """
+ db_pf = cls.dbapi.deployable_get(context, uuid)
+ obj_pf = cls._from_db_object(cls(context), db_pf)
+ pf_uuid = obj_pf.uuid
+
+ query = {"parent_uuid": pf_uuid, "type": "vf"}
+ db_vf_list = cls.dbapi.deployable_get_by_filters(context, query)
+
+ for db_vf in db_vf_list:
+ obj_vf = VirtualFunction.get(context, db_vf.uuid)
+ obj_pf.virtual_function_list.append(obj_vf)
+ return obj_pf
+
+ @classmethod
+ def get_by_filter(cls, context,
+ filters, sort_key='created_at',
+ sort_dir='desc', limit=None,
+ marker=None, join=None):
+ obj_dpl_list = []
+ filters['type'] = 'pf'
+ db_dpl_list = cls.dbapi.deployable_get_by_filters(context, filters,
+ sort_key=sort_key,
+ sort_dir=sort_dir,
+ limit=limit,
+ marker=marker,
+ join_columns=join)
+ for db_dpl in db_dpl_list:
+ obj_dpl = cls._from_db_object(cls(context), db_dpl)
+ query = {"parent_uuid": obj_dpl.uuid}
+ vf_get_list = VirtualFunction.get_by_filter(context,
+ query)
+ obj_dpl.virtual_function_list = vf_get_list
+ obj_dpl_list.append(obj_dpl)
+ return obj_dpl_list
+
+ @classmethod
+ def _from_db_object(cls, obj, db_obj):
+ """Converts a physical function to a formal object.
+
+ :param obj: An object of the class.
+ :param db_obj: A DB model of the object
+ :return: The object of the class with the database entity added
+ """
+ obj = Deployable._from_db_object(obj, db_obj)
+ if cls is PhysicalFunction:
+ obj.virtual_function_list = []
+ return obj \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/port.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/port.py
new file mode 100644
index 0000000..6379db6
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/port.py
@@ -0,0 +1,91 @@
+# Copyright 2018 Lenovo Research Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_log import log as logging
+from oslo_versionedobjects import base as object_base
+
+from cyborg.db import api as dbapi
+from cyborg.objects import base
+from cyborg.objects import fields as object_fields
+
+LOG = logging.getLogger(__name__)
+
@base.CyborgObjectRegistry.register
class Port(base.CyborgObject, object_base.VersionedObjectDictCompat):
    """Versioned object representing a physical port of a network card."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    dbapi = dbapi.get_instance()

    fields = {
        'uuid': object_fields.UUIDField(nullable=False),
        'computer_node': object_fields.UUIDField(nullable=False),
        'phy_port_name': object_fields.StringField(nullable=True),
        'pci_slot': object_fields.StringField(nullable=True),
        'product_id': object_fields.StringField(nullable=True),
        'vendor_id': object_fields.StringField(nullable=False),
        'is_used': object_fields.IntegerField(nullable=False),
        'accelerator_id': object_fields.UUIDField(nullable=True),
        'bind_instance_id': object_fields.UUIDField(nullable=True),
        'bind_port_id': object_fields.UUIDField(nullable=True),
        'device_type': object_fields.StringField(nullable=True),
    }

    def create(self, context=None):
        """Create a Port record in the DB.

        Used by cyborg-agents to auto-register the physical ports of
        network cards.
        """
        values = self.obj_get_changes()
        db_port = self.dbapi.port_create(context, values)
        self._from_db_object(self, db_port)

    @classmethod
    def get_by_uuid(cls, context, uuid):
        """Find a DB Port by UUID and return an Obj Port.

        NOTE(review): the original file defined ``get`` twice; this
        by-uuid variant was silently shadowed by the later definition
        and therefore unreachable. It is renamed so both lookups work.
        """
        db_port = cls.dbapi.port_get(context, uuid)
        return cls._from_db_object(cls(context), db_port)

    @classmethod
    def get(cls, context, phy_port_name, pci_slot, computer_node):
        """Return the Port matching name/slot/host, or None when absent."""
        db_port = cls.dbapi.port_get(context, phy_port_name=phy_port_name,
                                     pci_slot=pci_slot,
                                     computer_node=computer_node)
        if db_port:
            return cls._from_db_object(cls(context), db_port)
        return None

    @classmethod
    def list(cls, context, limit, marker, sort_key, sort_dir):
        """Return a list of Port objects."""
        db_ports = cls.dbapi.port_list(context, limit, marker, sort_key,
                                       sort_dir)
        return cls._from_db_object_list(context, db_ports)

    def save(self, context):
        """Update a Port record in the DB."""
        updates = self.obj_get_changes()
        db_port = self.dbapi.port_update(context, self.uuid, updates)
        self._from_db_object(self, db_port)

    def destroy(self, context):
        """Delete the Port record from the DB."""
        # The DB API layer spells this method 'port_destory'; the call
        # must match that spelling.
        self.dbapi.port_destory(context, self.uuid)
        self.obj_reset_changes()

    # Backward-compatible alias for the original misspelled method name.
    destory = destroy
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/virtual_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/virtual_function.py
new file mode 100644
index 0000000..3258c9d
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/objects/virtual_function.py
@@ -0,0 +1,61 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_log import log as logging
+from oslo_versionedobjects import base as object_base
+
+from cyborg.common import exception
+from cyborg.db import api as dbapi
+from cyborg.objects import base
+from cyborg.objects import fields as object_fields
+from cyborg.objects.deployable import Deployable
+
+LOG = logging.getLogger(__name__)
+
+
@base.CyborgObjectRegistry.register
class VirtualFunction(Deployable):
    """Deployable subclass representing an FPGA virtual function (VF)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    def create(self, context):
        """Create a VF deployable; reject any other deployable type."""
        if self.type != 'vf':
            raise exception.InvalidDeployType()
        super(VirtualFunction, self).create(context)

    def save(self, context):
        """Save a VF deployable; reject any other deployable type."""
        if self.type != 'vf':
            raise exception.InvalidDeployType()
        super(VirtualFunction, self).save(context)

    @classmethod
    def get_by_filter(cls, context,
                      filters, sort_key='created_at',
                      sort_dir='desc', limit=None,
                      marker=None, join=None):
        """Return VirtualFunction objects matching *filters*.

        The deployable type is forced to ``'vf'``. The caller's filters
        dict is left untouched (the original implementation mutated it
        by injecting a 'type' key).

        :param context: request context
        :param filters: dict of query filters (not modified)
        :returns: list of VirtualFunction objects
        """
        # Copy so the caller's dict does not get a 'type' key injected.
        filters = dict(filters, type='vf')
        obj_dpl_list = []
        db_dpl_list = cls.dbapi.deployable_get_by_filters(
            context, filters,
            sort_key=sort_key,
            sort_dir=sort_dir,
            limit=limit,
            marker=marker,
            join_columns=join)
        for db_dpl in db_dpl_list:
            obj_dpl_list.append(cls._from_db_object(cls(context), db_dpl))
        return obj_dpl_list
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/services/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/services/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/services/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/services/report.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/services/report.py
new file mode 100644
index 0000000..0449446
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/services/report.py
@@ -0,0 +1,165 @@
+# Copyright (c) 2018 Lenovo Technologies Co., Ltd
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+
+from keystoneauth1 import exceptions as k_exc
+from keystoneauth1 import loading as k_loading
+from oslo_config import cfg
+from cyborg.common import exception as c_exc
+
+from oslo_concurrency import lockutils
+
+synchronized = lockutils.synchronized_with_prefix('cyborg-')
+
+PLACEMENT_CLIENT_SEMAPHORE = 'placement_client'
+
+
def check_placement_api_available(f):
    """Decorator translating a missing placement endpoint into a Cyborg error.

    Wraps *f* so that keystoneauth1's EndpointNotFound surfaces to
    callers as cyborg's PlacementEndpointNotFound.
    """
    @functools.wraps(f)
    def wrapper(self, *args, **kwargs):
        try:
            return f(self, *args, **kwargs)
        except k_exc.EndpointNotFound:
            raise c_exc.PlacementEndpointNotFound()
    return wrapper
+
+
class SchedulerReportClient(object):
    """Client class for updating the scheduler.

    This class is used for updating the placement DB on the NOVA side.
    The Cyborg DB should be kept up to date with the placement DB all
    the time.

    Example::

        from cyborg.services import report as placement_report_client
        p_client = placement_report_client.SchedulerReportClient()
        resource_provider = {'name': 'rp_name', 'uuid': 'uuid'}
        p_client.create_resource_provider(resource_provider)
    """

    # Session filter routing every request to the placement service
    # endpoint in the configured region.
    keystone_filter = {'service_type': 'placement',
                       'region_name': cfg.CONF.placement.region_name}

    def __init__(self):
        self.association_refresh_time = {}
        self._client = self._create_client()
        self._disabled = False

    def _create_client(self):
        """Create the HTTP session accessing the placement service."""
        # NOTE(review): the original also reset association_refresh_time
        # here, duplicating __init__; the duplicate reset is dropped.
        auth_plugin = k_loading.load_auth_from_conf_options(
            cfg.CONF, 'placement')
        client = k_loading.load_session_from_conf_options(
            cfg.CONF, 'placement', auth=auth_plugin)
        client.additional_headers = {'accept': 'application/json'}
        return client

    def _get(self, url, **kwargs):
        return self._client.get(url, endpoint_filter=self.keystone_filter,
                                **kwargs)

    def _post(self, url, data, **kwargs):
        return self._client.post(url, json=data,
                                 endpoint_filter=self.keystone_filter,
                                 **kwargs)

    def _put(self, url, data, **kwargs):
        return self._client.put(url, json=data,
                                endpoint_filter=self.keystone_filter,
                                **kwargs)

    def _delete(self, url, **kwargs):
        return self._client.delete(url, endpoint_filter=self.keystone_filter,
                                   **kwargs)

    @check_placement_api_available
    def create_resource_provider(self, resource_provider):
        """Create a resource provider.

        :param resource_provider: The resource provider
        :type resource_provider: dict: name (required), uuid (required)
        """
        url = '/resource_providers'
        self._post(url, resource_provider)

    @check_placement_api_available
    def delete_resource_provider(self, resource_provider_uuid):
        """Delete a resource provider.

        :param resource_provider_uuid: UUID of the resource provider
        :type resource_provider_uuid: str
        """
        url = '/resource_providers/%s' % resource_provider_uuid
        self._delete(url)

    @check_placement_api_available
    def create_inventory(self, resource_provider_uuid, inventory):
        """Create an inventory.

        :param resource_provider_uuid: UUID of the resource provider
        :type resource_provider_uuid: str
        :param inventory: The inventory
        :type inventory: dict: resource_class (required), total (required),
            reserved (required), min_unit (required), max_unit (required),
            step_size (required), allocation_ratio (required)
        """
        url = '/resource_providers/%s/inventories' % resource_provider_uuid
        self._post(url, inventory)

    @check_placement_api_available
    def get_inventory(self, resource_provider_uuid, resource_class):
        """Get resource provider inventory.

        :param resource_provider_uuid: UUID of the resource provider
        :type resource_provider_uuid: str
        :param resource_class: Resource class name of the inventory to be
            returned
        :type resource_class: str
        :raises c_exc.PlacementInventoryNotFound: For failure to find
            inventory for a resource provider
        """
        url = '/resource_providers/%s/inventories/%s' % (
            resource_provider_uuid, resource_class)
        try:
            return self._get(url).json()
        except k_exc.NotFound as e:
            if "No resource provider with uuid" in e.details:
                raise c_exc.PlacementResourceProviderNotFound(
                    resource_provider=resource_provider_uuid)
            # The original wrapped this literal in _(), but no translation
            # helper is imported in this module, so that branch raised
            # NameError at runtime; a plain string match is used instead.
            elif "No inventory of class" in e.details:
                raise c_exc.PlacementInventoryNotFound(
                    resource_provider=resource_provider_uuid,
                    resource_class=resource_class)
            else:
                raise

    @check_placement_api_available
    def update_inventory(self, resource_provider_uuid, inventory,
                         resource_class):
        """Update an inventory.

        :param resource_provider_uuid: UUID of the resource provider
        :type resource_provider_uuid: str
        :param inventory: The inventory
        :type inventory: dict
        :param resource_class: The resource class of the inventory to update
        :type resource_class: str
        :raises c_exc.PlacementInventoryUpdateConflict: For failure to update
            inventory due to outdated resource_provider_generation
        """
        url = '/resource_providers/%s/inventories/%s' % (
            resource_provider_uuid, resource_class)
        try:
            self._put(url, inventory)
        except k_exc.Conflict:
            raise c_exc.PlacementInventoryUpdateConflict(
                resource_provider=resource_provider_uuid,
                resource_class=resource_class)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/base.py
new file mode 100644
index 0000000..664afa6
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/base.py
@@ -0,0 +1,169 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+from oslo_config import cfg
+from oslo_config import fixture as config_fixture
+from oslo_context import context
+from oslo_db import options
+from oslo_log import log
+from oslotest import base
+import pecan
+import contextlib
+import mock
+
+from cyborg.common import config as cyborg_config
+from cyborg.tests.unit import policy_fixture
+
+
+
+CONF = cfg.CONF
+options.set_defaults(cfg.CONF)
+try:
+ log.register_options(CONF)
+except cfg.ArgsAlreadyParsedError:
+ pass
+
+
class TestCase(base.BaseTestCase):
    """Test case base class for all unit tests."""

    def setUp(self):
        """Set up an admin context, test config and the policy fixture."""
        super(TestCase, self).setUp()
        self.context = context.get_admin_context()
        self._set_config()
        self.policy = self.useFixture(policy_fixture.PolicyFixture())

    def _set_config(self):
        # Route config access through a fixture so every override is
        # automatically undone at test cleanup.
        self.cfg_fixture = self.useFixture(config_fixture.Config(cfg.CONF))
        self.config(use_stderr=False,
                    fatal_exception_format_errors=True)
        self.set_defaults(host='fake-mini',
                          debug=True)
        # In-memory SQLite keeps DB-backed tests hermetic and fast.
        self.set_defaults(connection="sqlite://",
                          sqlite_synchronous=False,
                          group='database')
        cyborg_config.parse_args([], default_config_files=[])

    def config(self, **kw):
        """Override config options for a test."""
        self.cfg_fixture.config(**kw)

    def set_defaults(self, **kw):
        """Set default values of config options."""
        group = kw.pop('group', None)
        for o, v in kw.items():
            self.cfg_fixture.set_default(o, v, group=group)

    def get_path(self, project_file=None):
        """Get the absolute path to a file. Used for testing the API.

        :param project_file: File whose path to return. Default: None.
        :returns: path to the specified file, or path to project root.
        """
        root = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', '..')
        )
        if project_file:
            return os.path.join(root, project_file)
        else:
            return root
+
+# Test worker cannot survive eventlet's Timeout exception, which effectively
+# kills the whole worker, with all test cases scheduled to it. This metaclass
+# makes all test cases convert Timeout exceptions into unittest friendly
+# failure mode (self.fail).
class DietTestCase(base.BaseTestCase):
    """Same great taste, less filling.

    BaseTestCase is responsible for doing lots of plugin-centric setup
    that not all tests require (or can tolerate). This class provides
    only functionality that is common across all tests.
    """

    def setUp(self):
        super(DietTestCase, self).setUp()

        # Default all DB-backed tests to in-memory SQLite.
        options.set_defaults(cfg.CONF, connection='sqlite://')

        debugger = os.environ.get('OS_POST_MORTEM_DEBUGGER')
        if debugger:
            # NOTE(review): post_mortem_debug is not imported in this
            # module, so this branch would raise NameError — confirm the
            # intended helper and import it.
            self.addOnException(post_mortem_debug.get_exception_handler(
                debugger))

        # Ensure mock patches never leak between tests.
        self.addCleanup(mock.patch.stopall)

        self.addOnException(self.check_for_systemexit)
        self.orig_pid = os.getpid()

    def addOnException(self, handler):
        # Wrap the handler so a crash inside it is recorded as a test
        # detail instead of killing the test runner.

        def safe_handler(*args, **kwargs):
            try:
                return handler(*args, **kwargs)
            except Exception:
                # NOTE(review): neither excutils (oslo_utils) nor
                # testtools is imported in this module; this except
                # branch would itself raise NameError — confirm and
                # add the imports.
                with excutils.save_and_reraise_exception(reraise=False) as ctx:
                    self.addDetail('Failure in exception handler %s' % handler,
                                   testtools.content.TracebackContent(
                                       (ctx.type_, ctx.value, ctx.tb), self))

        return super(DietTestCase, self).addOnException(safe_handler)

    def check_for_systemexit(self, exc_info):
        # SystemExit raised in a subprocess must propagate so the child
        # actually exits; in the main test process it is turned into a
        # test failure.
        if isinstance(exc_info[1], SystemExit):
            if os.getpid() != self.orig_pid:
                # Subprocess - let it just exit
                raise
            # This makes sys.exit(0) still a failure
            self.force_failure = True

    @contextlib.contextmanager
    def assert_max_execution_time(self, max_execution_time=5):
        # NOTE(review): eventlet is not imported in this module — this
        # context manager would raise NameError when used; confirm and
        # import it.
        with eventlet.Timeout(max_execution_time, False):
            yield
            return
        self.fail('Execution of this test timed out')

    def assertOrderedEqual(self, expected, actual):
        # Compare after sorting nested list values so ordering
        # differences inside lists do not fail the comparison.
        expect_val = self.sort_dict_lists(expected)
        actual_val = self.sort_dict_lists(actual)
        self.assertEqual(expect_val, actual_val)

    def sort_dict_lists(self, dic):
        # Recursively sorts list values (in place) within the given dict
        # and returns it.
        for key, value in dic.items():
            if isinstance(value, list):
                dic[key] = sorted(value)
            elif isinstance(value, dict):
                dic[key] = self.sort_dict_lists(value)
        return dic

    def assertDictSupersetOf(self, expected_subset, actual_superset):
        """Checks that actual dict contains the expected dict.

        After checking that the arguments are of the right type, this
        checks that each item in expected_subset is in, and matches,
        what is in actual_superset. Separate tests are done, so that
        detailed info can be reported upon failure.
        """
        if not isinstance(expected_subset, dict):
            self.fail("expected_subset (%s) is not an instance of dict" %
                      type(expected_subset))
        if not isinstance(actual_superset, dict):
            self.fail("actual_superset (%s) is not an instance of dict" %
                      type(actual_superset))
        for k, v in expected_subset.items():
            self.assertIn(k, actual_superset)
            self.assertEqual(v, actual_superset[k],
                             "Key %(key)s expected: %(exp)r, actual %(act)r" %
                             {'key': k, 'exp': v, 'act': actual_superset[k]})
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/functional/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/functional/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/functional/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/__init__.py
new file mode 100644
index 0000000..eeaaced
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/__init__.py
@@ -0,0 +1,38 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+:mod:`cyborg.tests.unit` -- cyborg unit tests
+=====================================================
+
+.. automodule:: cyborg.tests.unit
+ :platform: Unix
+"""
+
+import eventlet
+
+from cyborg import objects
+
+
+eventlet.monkey_patch(os=False)
+
+# Make sure this is done after eventlet monkey patching otherwise
+# the threading.local() store used in oslo_messaging will be initialized to
+# threadlocal storage rather than greenthread local. This will cause context
+# sets and deletes in that storage to clobber each other.
+# Make sure we have all of the objects loaded. We do this
+# at module import time, because we may be using mock decorators in our
+# tests that run at import time.
+objects.register_all()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/base.py
new file mode 100644
index 0000000..2041330
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/base.py
@@ -0,0 +1,105 @@
+# Copyright 2018 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+import os
+import subprocess
+
+import fixtures
+
+from cyborg.accelerator.drivers.fpga.base import FPGADriver
+from cyborg.accelerator.drivers.fpga.intel import sysinfo
+from cyborg.tests import base
+from cyborg.tests.unit.accelerator.drivers.fpga.intel import prepare_test_data
+
+
class TestFPGADriver(base.TestCase):
    """Tests for the FPGADriver factory and the Intel backend it creates."""

    def setUp(self):
        super(TestFPGADriver, self).setUp()
        # Remember the real sysfs root, then point the driver at a fake
        # sysfs tree built inside a temp dir.
        self.syspath = sysinfo.SYS_FPGA
        sysinfo.SYS_FPGA = "/sys/class/fpga"
        tmp_sys_dir = self.useFixture(fixtures.TempDir())
        prepare_test_data.create_fake_sysfs(tmp_sys_dir.path)
        sysinfo.SYS_FPGA = os.path.join(
            tmp_sys_dir.path, sysinfo.SYS_FPGA.split("/", 1)[-1])

    def tearDown(self):
        super(TestFPGADriver, self).tearDown()
        # Restore the real sysfs root for other tests.
        sysinfo.SYS_FPGA = self.syspath

    def test_create(self):
        FPGADriver.create("intel")
        self.assertRaises(LookupError, FPGADriver.create, "xilinx")

    def test_discover(self):
        d = FPGADriver()
        self.assertRaises(NotImplementedError, d.discover)

    def test_program(self):
        d = FPGADriver()
        self.assertRaises(NotImplementedError, d.program, "path", "image")

    def test_intel_discover(self):
        expect = [{'function': 'pf', 'assignable': False, 'pr_num': '1',
                   'vendor_id': '0x8086', 'devices': '0000:5e:00.0',
                   'regions': [{
                       'function': 'vf', 'assignable': True,
                       'product_id': '0xbcc1',
                       'name': 'intel-fpga-dev.2',
                       'parent_devices': '0000:5e:00.0',
                       'path': '%s/intel-fpga-dev.2' % sysinfo.SYS_FPGA,
                       'vendor_id': '0x8086',
                       'devices': '0000:5e:00.1'}],
                   'name': 'intel-fpga-dev.0',
                   'parent_devices': '',
                   'path': '%s/intel-fpga-dev.0' % sysinfo.SYS_FPGA,
                   'product_id': '0xbcc0'},
                  {'function': 'pf', 'assignable': True, 'pr_num': '0',
                   'vendor_id': '0x8086', 'devices': '0000:be:00.0',
                   'name': 'intel-fpga-dev.1',
                   'parent_devices': '',
                   'path': '%s/intel-fpga-dev.1' % sysinfo.SYS_FPGA,
                   'product_id': '0xbcc0'}]
        # Sort by a stable key: a bare list.sort() on dicts raises
        # TypeError on Python 3 (dicts are unorderable).
        expect.sort(key=lambda fpga: fpga['name'])

        intel = FPGADriver.create("intel")
        fpgas = intel.discover()
        fpgas.sort(key=lambda fpga: fpga['name'])
        self.assertEqual(2, len(fpgas))
        self.assertEqual(fpgas, expect)

    @mock.patch.object(subprocess, 'Popen', autospec=True)
    def test_intel_program(self, mock_popen):

        class p(object):
            # Fake Popen result: fpgaconf "succeeded".
            returncode = 0

            def wait(self):
                pass

        b = "0x5e"
        d = "0x00"
        f = "0x0"
        expect_cmd = ['sudo', 'fpgaconf', '-b', b,
                      '-d', d, '-f', f, '/path/image']
        mock_popen.return_value = p()
        intel = FPGADriver.create("intel")
        # program VF
        intel.program("0000:5e:00.1", "/path/image")
        mock_popen.assert_called_with(expect_cmd, stdout=subprocess.PIPE)

        # program PF (same bus, so the same command line is expected)
        intel.program("0000:5e:00.0", "/path/image")
        mock_popen.assert_called_with(expect_cmd, stdout=subprocess.PIPE)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/driver.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/driver.py
new file mode 100644
index 0000000..5760ecf
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/driver.py
@@ -0,0 +1,93 @@
+# Copyright 2018 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+import os
+import subprocess
+
+import fixtures
+
+from cyborg.accelerator.drivers.fpga.intel import sysinfo
+from cyborg.accelerator.drivers.fpga.intel.driver import IntelFPGADriver
+from cyborg.tests import base
+from cyborg.tests.unit.accelerator.drivers.fpga.intel import prepare_test_data
+
+
class TestIntelFPGADriver(base.TestCase):
    """Tests exercising IntelFPGADriver directly (not via the factory)."""

    def setUp(self):
        super(TestIntelFPGADriver, self).setUp()
        # Remember the real sysfs root, then point the driver at a fake
        # sysfs tree built inside a temp dir.
        self.syspath = sysinfo.SYS_FPGA
        sysinfo.SYS_FPGA = "/sys/class/fpga"
        tmp_sys_dir = self.useFixture(fixtures.TempDir())
        prepare_test_data.create_fake_sysfs(tmp_sys_dir.path)
        sysinfo.SYS_FPGA = os.path.join(
            tmp_sys_dir.path, sysinfo.SYS_FPGA.split("/", 1)[-1])

    def tearDown(self):
        super(TestIntelFPGADriver, self).tearDown()
        # Restore the real sysfs root for other tests.
        sysinfo.SYS_FPGA = self.syspath

    def test_discover(self):
        expect = [{'function': 'pf', 'assignable': False, 'pr_num': '1',
                   'vendor_id': '0x8086', 'devices': '0000:5e:00.0',
                   'regions': [{
                       'function': 'vf', 'assignable': True,
                       'product_id': '0xbcc1',
                       'name': 'intel-fpga-dev.2',
                       'parent_devices': '0000:5e:00.0',
                       'path': '%s/intel-fpga-dev.2' % sysinfo.SYS_FPGA,
                       'vendor_id': '0x8086',
                       'devices': '0000:5e:00.1'}],
                   'name': 'intel-fpga-dev.0',
                   'parent_devices': '',
                   'path': '%s/intel-fpga-dev.0' % sysinfo.SYS_FPGA,
                   'product_id': '0xbcc0'},
                  {'function': 'pf', 'assignable': True, 'pr_num': '0',
                   'vendor_id': '0x8086', 'devices': '0000:be:00.0',
                   'parent_devices': '',
                   'name': 'intel-fpga-dev.1',
                   'path': '%s/intel-fpga-dev.1' % sysinfo.SYS_FPGA,
                   'product_id': '0xbcc0'}]
        # Sort by a stable key: a bare list.sort() on dicts raises
        # TypeError on Python 3 (dicts are unorderable).
        expect.sort(key=lambda fpga: fpga['name'])

        intel = IntelFPGADriver()
        fpgas = intel.discover()
        fpgas.sort(key=lambda fpga: fpga['name'])
        self.assertEqual(2, len(fpgas))
        self.assertEqual(fpgas, expect)

    @mock.patch.object(subprocess, 'Popen', autospec=True)
    def test_intel_program(self, mock_popen):

        class p(object):
            # Fake Popen result: fpgaconf "succeeded".
            returncode = 0

            def wait(self):
                pass

        b = "0x5e"
        d = "0x00"
        f = "0x0"
        expect_cmd = ['sudo', 'fpgaconf', '-b', b,
                      '-d', d, '-f', f, '/path/image']
        mock_popen.return_value = p()
        intel = IntelFPGADriver()
        # program VF
        intel.program("0000:5e:00.1", "/path/image")
        mock_popen.assert_called_with(expect_cmd, stdout=subprocess.PIPE)

        # program PF (same bus, so the same command line is expected)
        intel.program("0000:5e:00.0", "/path/image")
        mock_popen.assert_called_with(expect_cmd, stdout=subprocess.PIPE)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/prepare_test_data.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/prepare_test_data.py
new file mode 100644
index 0000000..8955c39
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/prepare_test_data.py
@@ -0,0 +1,295 @@
+# Copyright 2018 Intel, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import argparse
+import copy
+import glob
+import os
+import shutil
+
+
# PCI BDF addresses of the fake devices: two physical functions, plus one
# virtual function hosted by PF0.
PF0_ADDR = "0000:5e:00.0"
PF1_ADDR = "0000:be:00.0"
VF0_ADDR = "0000:5e:00.1"
# Logical device tree: dev.0 (PF) carries region dev.2 (VF); dev.1 is a
# stand-alone PF with no regions.
FPGA_TREE = {
    "dev.0": {"bdf": PF0_ADDR,
              "regions": {"dev.2": {"bdf": VF0_ADDR}}},
    "dev.1": {"bdf": PF1_ADDR}}

# Relative sysfs roots; created under a caller-supplied prefix directory.
SYS_DEVICES = "sys/devices"
SYS_CLASS_FPGA = "sys/class/fpga"

# Device-name prefix, e.g. "intel-fpga-dev.0".
DEV_PREFIX = "intel-fpga"

# Sub-directories created under every fake device directory.
PGFA_DEVICE_COMMON_SUB_DIR = ["power"]
+
# sysfs attribute files written for every fake FPGA device.  An empty-string
# value creates an empty file, a string value is written followed by a
# newline, and a list value is written one element per line (see
# gen_fpga_content).  Per-device overrides live in
# PGFA_DEVICES_SPECIAL_COMMON_CONTENT.
PGFA_DEVICE_COMMON_CONTENT = {
    "broken_parity_status": "0",
    "class": "0x120000",
    "config": "",
    "consistent_dma_mask_bits": "64",
    "d3cold_allowed": "1",
    "device": "0xbcc0",
    "dma_mask_bits": "64",
    "driver_override": "(null)",
    "enable": "1",
    "irq": "16",
    "local_cpulist": "0-111",
    "local_cpus": "00000000,00000000,00000000,00000000,00000000,"
                  "00000000,00000000,00000000,00000000,00000000,"
                  "0000ffff,ffffffff,ffffffff,ffffffff",
    "modalias": "pci:v00008086d0000BCC0sv00000000sd00000000bc12sc00i00",
    "msi_bus": "",
    "numa_node": "-1",
    "resource": [
        "0x00000000c6000000 0x00000000c607ffff 0x000000000014220c",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000",
        "0x00000000c6080000 0x00000000c60fffff 0x000000000014220c",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000",
        "0x00000000c6100000 0x00000000c617ffff 0x000000000014220c",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000",
        "0x0000000000000000 0x0000000000000000 0x0000000000000000"],
    "resource0": "",
    "resource0_wc": "",
    "subsystem_device": "0x0000",
    "subsystem_vendor": "0x0000",
    "uevent": [
        "DRIVER=intel-fpga-pci",
        "PCI_CLASS=120000",
        "PCI_ID=8086:BCC0",
        "PCI_SUBSYS_ID=0000:0000",
        "PCI_SLOT_NAME=0000:5e:00.0",
        "MODALIAS=pci:v00008086d0000BCC0sv00000000sd00000000bc12sc00i00"],
    "vendor": "0x8086"}
+
# Per-device overrides merged on top of PGFA_DEVICE_COMMON_CONTENT:
# dev.0 is an SR-IOV PF with one VF enabled, dev.1 an SR-IOV-capable PF
# with no VFs enabled, dev.2 the VF (device id 0xbcc1) hosted by dev.0.
PGFA_DEVICES_SPECIAL_COMMON_CONTENT = {
    "dev.0": {
        "resource2": "",
        "resource2_wc": "",
        "sriov_numvfs": "1",
        "sriov_totalvfs": "1",
    },
    "dev.1": {
        "resource": [
            "0x00000000fbc00000 0x00000000fbc7ffff 0x000000000014220c",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x00000000fbc80000 0x00000000fbcfffff 0x000000000014220c",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x00000000fbd00000 0x00000000fbd7ffff 0x000000000014220c",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000"],
        "resource2": "",
        "resource2_wc": "",
        "sriov_numvfs": "0",
        "sriov_totalvfs": "1",
        "uevent": [
            "DRIVER=intel-fpga-pci",
            "PCI_CLASS=120000",
            "PCI_ID=8086:BCC0",
            "PCI_SUBSYS_ID=0000:0000",
            "PCI_SLOT_NAME=0000:be:00.0",
            "MODALIAS=pci:v00008086d0000BCC0sv00000000sd00000000bc12sc00i00"],
    },
    "dev.2": {
        "d3cold_allowed": "0",
        "device": "0xbcc1",
        "modalias": "pci:v00008086d0000BCC0sv00000000sd00000000bc12sc00i00",
        "irq": "0",
        "resource": [
            "0x00000000c6100000 0x00000000c617ffff 0x000000000014220c",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000",
            "0x0000000000000000 0x0000000000000000 0x0000000000000000"],
        "uevent": [
            "DRIVER=intel-fpga-pci",
            "PCI_CLASS=120000",
            "PCI_ID=8086:BCC1",
            "PCI_SUBSYS_ID=0000:0000",
            "PCI_SLOT_NAME=0000:5e:00.1",
            "MODALIAS=pci:v00008086d0000BCC1sv00000000sd00000000bc12sc00i00"],
    }
}
+
# Symlinks created for every fake device directory.
PGFA_DEVICE_COMMON_SOFT_LINK = {
    "driver": "../../../bus/pci/drivers/intel-fpga-pci",
    "iommu": "../../virtual/iommu/dmar8",
    "iommu_group": "../../../kernel/iommu_groups/75",
    "subsystem": "../../../bus/pci"
}

# Per-device symlink overrides merged on top of the common set.
# (A second, byte-identical assignment of this dict followed the first in
# the original file; that dead duplicate has been removed.)
PGFA_DEVICES_SPECIAL_SOFT_LINK = {
    "dev.0": {
        "firmware_node": "../../LNXSYSTM:00/device:00/PNP0A08:18/device:1d4",
    },
    "dev.1": {
        "firmware_node": "../../LNXSYSTM:00/device:00/PNP0A08:19/device:1d5",
        "iommu": "../../virtual/iommu/dmar4",
        "iommu_group": "../../../kernel/iommu_groups/76",
    },
    "dev.2": {
        "iommu": "../../virtual/iommu/dmar9",
        "iommu_group": "../../../kernel/iommu_groups/81",
    }
}

# PF side of the SR-IOV pairing: maps ("virtfn", VF bdf) to the link name
# "virtfnN" (N = VF function number - 1) and a "../<bdf>" target.
PGFA_DEVICE_PF_SOFT_LINK = {
    "virtfn": lambda k, v: (k + str(int(v.rsplit(".", 1)[-1]) - 1),
                            "/".join(["..", v]))
}

# VF side of the pairing: a "physfn" link back to "../<PF bdf>".
PGFA_DEVICE_VF_SOFT_LINK = {
    "physfn": lambda k, v: (k, "/".join(["..", v]))
}
+
+
def gen_fpga_content(path, dev):
    """Write the sysfs attribute files for one fake FPGA device.

    :param path: directory in which the attribute files are created.
    :param dev: key into PGFA_DEVICES_SPECIAL_COMMON_CONTENT ("dev.0"...)
        whose overrides are merged on top of the common attributes.
    """
    content = copy.copy(PGFA_DEVICE_COMMON_CONTENT)
    content.update(PGFA_DEVICES_SPECIAL_COMMON_CONTENT[dev])
    for k, v in content.items():
        p = os.path.join(path, k)
        if not v:
            # Empty value: just create an empty attribute file.
            # NOTE(review): os.mknod may fail on some filesystems without
            # privileges; open(p, 'a').close() would be more portable.
            os.mknod(p)
        elif isinstance(v, str):
            with open(p, 'a') as f:
                f.write(v + "\n")
        elif isinstance(v, list):
            # List values become one line per element.
            with open(p, 'a') as f:
                f.writelines([line + "\n" for line in v])
+
+
def gen_fpga_sub_dir(path):
    """Create the common sysfs sub-directories (e.g. "power") under path."""
    for sub_dir in PGFA_DEVICE_COMMON_SUB_DIR:
        os.makedirs(os.path.join(path, sub_dir))
+
+
def gen_fpga_pf_soft_link(path, bdf):
    """Create the PF-side "virtfnN" symlink pointing at the VF's BDF."""
    for name, target in PGFA_DEVICE_PF_SOFT_LINK.items():
        if callable(target):
            # The table entry derives both link name and target from the BDF.
            name, target = target(name, bdf)
        os.symlink(target, os.path.join(path, name))
+
+
def gen_fpga_common_soft_link(path, bdf):
    """Create the symlinks shared by every fake device directory.

    ``bdf`` is unused here; the parameter is kept for signature symmetry
    with the PF/VF soft-link generators.
    """
    for k, v in PGFA_DEVICE_COMMON_SOFT_LINK.items():
        os.symlink(v, os.path.join(path, k))
+
+
def gen_fpga_vf_soft_link(path, bdf):
    """Create the VF-side "physfn" symlink pointing back at the PF's BDF."""
    for name, target in PGFA_DEVICE_VF_SOFT_LINK.items():
        if callable(target):
            # The table entry derives both link name and target from the BDF.
            name, target = target(name, bdf)
        os.symlink(target, os.path.join(path, name))
+
+
def create_devices_path_and_files(tree, device_path, class_fpga_path,
                                  vf=False, pfinfo=None):
    """Recursively materialize the fake sysfs directories for a device tree.

    :param tree: FPGA_TREE-shaped mapping of device name -> {"bdf", ...}.
    :param device_path: root of the fake "sys/devices" hierarchy.
    :param class_fpga_path: root of the fake "sys/class/fpga" hierarchy.
    :param vf: True when the current level describes VF regions; the
        PF<->VF symlink pair is then created as well.
    :param pfinfo: {"path", "bdf"} of the parent PF, used only when vf=True.

    Note: the original signature used the mutable default ``pfinfo={}``,
    which is shared across calls; a ``None`` sentinel preserves the same
    behavior safely.
    """
    pfinfo = {} if pfinfo is None else pfinfo
    for k, v in tree.items():
        bdf = v["bdf"]
        # e.g. "0000:5e:00.0" -> "pci0000:5e"
        pci_path = "pci" + bdf.rsplit(":", 1)[0]
        bdf_path = os.path.join(device_path, pci_path, bdf)
        ln = "-".join([DEV_PREFIX, k])
        dev_path = os.path.join(bdf_path, "fpga", ln)
        os.makedirs(dev_path)
        gen_fpga_content(bdf_path, k)
        gen_fpga_sub_dir(bdf_path)
        if vf:
            gen_fpga_pf_soft_link(pfinfo["path"], bdf)
            gen_fpga_vf_soft_link(bdf_path, pfinfo["bdf"])
        pfinfo = {"path": bdf_path, "bdf": bdf}
        if "regions" in v:
            # Recurse into the VF regions with this device as the PF.
            create_devices_path_and_files(
                v["regions"], device_path, class_fpga_path, True, pfinfo)
        # Register the device under sys/class/fpga and link back to its BDF.
        source = dev_path.split("sys")[-1]
        os.symlink("../.." + source, os.path.join(class_fpga_path, ln))
        os.symlink("../../../" + bdf, os.path.join(dev_path, "device"))
+
+
def create_devices_soft_link(class_fpga_path):
    """Create the common and per-device symlinks for every registered device.

    Walks the entries under sys/class/fpga, resolves each back to its real
    device directory, and creates the driver/iommu/subsystem links there,
    making the link targets as directories when they do not yet exist.
    """
    devs = glob.glob1(class_fpga_path, "*")
    for dev in devs:
        # Resolve "intel-fpga-dev.N" to the real BDF directory it links to.
        path = os.path.realpath("%s/%s/device" % (class_fpga_path, dev))
        softlinks = copy.copy(PGFA_DEVICE_COMMON_SOFT_LINK)
        softlinks.update(
            PGFA_DEVICES_SPECIAL_SOFT_LINK[dev.rsplit("-", 1)[-1]])
        for k, v in softlinks.items():
            source = os.path.normpath(os.path.join(path, v))
            if not os.path.exists(source):
                # Fabricate the link target so the symlink is not dangling.
                os.makedirs(source)
            os.symlink(v, os.path.join(path, k))
+
+
+def create_fake_sysfs(prefix=""):
+ sys_device = os.path.join(prefix, SYS_DEVICES)
+ sys_class_fpga = os.path.join(prefix, SYS_CLASS_FPGA)
+ basedir = os.path.dirname(sys_device)
+ if os.path.exists(basedir):
+ shutil.rmtree(basedir, ignore_errors=False, onerror=None)
+ os.makedirs(sys_class_fpga)
+ create_devices_path_and_files(FPGA_TREE, sys_device, sys_class_fpga)
+ create_devices_soft_link(sys_class_fpga)
+
+
def main(prefix=""):
    """Entry point: build the fake sysfs tree under ``prefix``.

    Generalized with a backward-compatible ``prefix`` parameter: the
    original main() took no arguments and could not honour the CLI's
    ``--prefix`` option (the __main__ block had to bypass it entirely).
    Calling main() with no arguments behaves exactly as before.
    """
    create_fake_sysfs(prefix)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Generate a fake sysfs for intel FPGA.")
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument("-v", "--verbose", action="store_true")
+ group.add_argument("-q", "--quiet", action="store_true")
+ parser.add_argument("-p", "--prefix", type=str,
+ default="/tmp", dest="p",
+ help='Set the prefix path of the fake sysfs. '
+ 'default "/tmp"')
+ args = parser.parse_args()
+
+ create_fake_sysfs(args.p)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/test_generic.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/test_generic.py
new file mode 100644
index 0000000..bf066e4
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/test_generic.py
@@ -0,0 +1,66 @@
+# Copyright 2017 Lenovo Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Base classes for Generic Driver tests."""
+
+import mock
+
+from cyborg.accelerator.drivers.generic_driver import GenericDriver as generic
+from cyborg.conductor.rpcapi import ConductorAPI as conductor_api
+
+FAKE_CONTEXT = mock.MagicMock()
+
+
class GenericDriverTest():
    """Class for testing of generic driver

    NOTE(review): this class has no base class, yet setUp() calls
    super().setUp() (AttributeError on plain object), the tests read
    self.acc which is never assigned here, and the driver methods are
    invoked through the class alias ``generic`` without an instance.
    Presumably it is meant to be mixed into a TestCase subclass that
    provides ``acc`` — confirm before relying on these tests running.
    """

    def setUp(self):
        super(GenericDriverTest, self).setUp()

    @mock.patch.object(conductor_api, 'accelerator_create')
    def test_create_accelerator(self, mock_acc_create):
        mock_acc_create.return_value = self.acc
        generic.create_accelerator(context=FAKE_CONTEXT)

        mock_acc_create.assert_called()

    @mock.patch.object(conductor_api, 'accelerator_list_one')
    def test_get_accelerator(self, mock_acc_get):
        mock_acc_get.return_value = self.acc
        generic.get_accelerator(context=FAKE_CONTEXT)

        mock_acc_get.assert_called()

    @mock.patch.object(conductor_api, 'accelerator_list_all')
    def test_list_accelerators(self, mock_acc_list):
        mock_acc_list.return_value = self.acc
        generic.list_accelerators(context=FAKE_CONTEXT)

        mock_acc_list.assert_called()

    @mock.patch.object(conductor_api, 'accelerator_update')
    def test_update_accelerator(self, mock_acc_update):
        mock_acc_update.return_value = self.acc
        generic.update_accelerator(context=FAKE_CONTEXT)

        mock_acc_update.assert_called()

    @mock.patch.object(conductor_api, 'accelerator_delete')
    def test_delete_accelerator(self, mock_acc_delete):
        mock_acc_delete.return_value = self.acc
        generic.delete_accelerator(context=FAKE_CONTEXT)

        mock_acc_delete.assert_called()
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/test_nvmf.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/test_nvmf.py
new file mode 100644
index 0000000..9f9a5be
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/test_nvmf.py
@@ -0,0 +1,131 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from cyborg.tests import base
+import mock
+from cyborg.accelerator.drivers.spdk.nvmf.nvmf import NVMFDRIVER
+from cyborg.accelerator.drivers.spdk.util import common_fun
+from cyborg.accelerator.drivers.spdk.util.pyspdk.nvmf_client import NvmfTgt
+
+
class TestNVMFDRIVER(base.TestCase):
    """Tests for the SPDK NVMe-oF target driver (NVMFDRIVER)."""

    def setUp(self):
        super(TestNVMFDRIVER, self).setUp()
        self.nvmf_driver = NVMFDRIVER()

    def tearDown(self):
        super(TestNVMFDRIVER, self).tearDown()
        # Bug fix: this previously cleared self.vhost_driver (copy-paste
        # from the vhost tests), leaving the NVMFDRIVER instance alive.
        self.nvmf_driver = None

    @mock.patch.object(NVMFDRIVER, 'get_one_accelerator')
    def test_discover_accelerator(self, mock_get_one_accelerator):
        """discover_accelerator() should aggregate bdevs and subsystems."""
        expect_accelerator = {
            'server': 'nvmf',
            'bdevs': [{"num_blocks": 131072,
                       "name": "nvme1",
                       "block_size": 512
                       }],
            'subsystems': [{"core": 0,
                            "nqn": "nqn.2018-01.org.nvmexpress.discovery",
                            "hosts": []
                            }]
        }
        # First: with the SPDK process not alive, nothing is collected.
        alive = mock.Mock(return_value=False)
        self.nvmf_driver.py.is_alive = alive
        check_error = mock.Mock(return_value=False)
        common_fun.check_for_setup_error = check_error
        self.assertFalse(
            mock_get_one_accelerator.called,
            "Failed to discover_accelerator if py not alive."
        )
        # Then: with the process alive, fake the NvmfTgt client responses.
        alive = mock.Mock(return_value=True)
        self.nvmf_driver.py.is_alive = alive
        check_error = mock.Mock(return_value=True)
        common_fun.check_for_setup_error = check_error
        acce_client = NvmfTgt(self.nvmf_driver.py)
        bdevs_fake = [{"num_blocks": 131072,
                       "name": "nvme1",
                       "block_size": 512
                       }]
        bdev_list = mock.Mock(return_value=bdevs_fake)
        acce_client.get_bdevs = bdev_list
        subsystems_fake = [{"core": 0,
                            "nqn": "nqn.2018-01.org.nvmexpress.discovery",
                            "hosts": []
                            }]
        subsystem_list = mock.Mock(return_value=subsystems_fake)
        acce_client.get_nvmf_subsystems = subsystem_list
        accelerator_fake = {
            'server': self.nvmf_driver.SERVER,
            'bdevs': acce_client.get_bdevs(),
            'subsystems': acce_client.get_nvmf_subsystems()
        }
        success_send = mock.Mock(return_value=accelerator_fake)
        self.nvmf_driver.get_one_accelerator = success_send
        accelerator = self.nvmf_driver.discover_accelerator()
        self.assertEqual(accelerator, expect_accelerator)

    def test_accelerator_list(self):
        """accelerator_list() should pass through get_all_accelerators()."""
        expect_accelerators = [{
            'server': 'nvmf',
            'bdevs': [{"num_blocks": 131072,
                       "name": "nvme1",
                       "block_size": 512
                       }],
            'subsystems':
                [{"core": 0,
                  "nqn": "nqn.2018-01.org.nvmexpress.discovery",
                  "hosts": []
                  }]
        },
            {
                'server': 'nvnf_tgt',
                'bdevs': [{"num_blocks": 131072,
                           "name": "nvme1",
                           "block_size": 512
                           }],
                'subsystems':
                    [{"core": 0,
                      "nqn": "nqn.2018-01.org.nvmexpress.discovery",
                      "hosts": []
                      }]
            }
        ]
        success_send = mock.Mock(return_value=expect_accelerators)
        self.nvmf_driver.get_all_accelerators = success_send
        self.assertEqual(self.nvmf_driver.accelerator_list(),
                         expect_accelerators)

    # The remaining driver operations are not yet implemented; the empty
    # tests document the intended coverage surface.
    def test_install_accelerator(self):
        pass

    def test_uninstall_accelerator(self):
        pass

    def test_update(self):
        pass

    def test_attach_instance(self):
        pass

    def test_detach_instance(self):
        pass

    def test_delete_subsystem(self):
        pass

    def test_construct_subsystem(self):
        pass
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/test_vhost.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/test_vhost.py
new file mode 100644
index 0000000..3c04b8c
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/test_vhost.py
@@ -0,0 +1,144 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from cyborg.tests import base
+import mock
+from cyborg.accelerator.drivers.spdk.vhost.vhost import VHOSTDRIVER
+from cyborg.accelerator.drivers.spdk.util import common_fun
+from cyborg.accelerator.drivers.spdk.util.pyspdk.vhost_client import VhostTgt
+
+
class TestVHOSTDRIVER(base.TestCase):
    """Tests for the SPDK vhost target driver (VHOSTDRIVER)."""

    def setUp(self):
        super(TestVHOSTDRIVER, self).setUp()
        self.vhost_driver = VHOSTDRIVER()

    def tearDown(self):
        super(TestVHOSTDRIVER, self).tearDown()
        self.vhost_driver = None

    @mock.patch.object(VHOSTDRIVER, 'get_one_accelerator')
    def test_discover_accelerator(self, mock_get_one_accelerator):
        """discover_accelerator() should aggregate bdevs, scsi devices,
        luns and interfaces from the VhostTgt client.
        """
        expect_accelerator = {
            'server': 'vhost',
            'bdevs': [{"num_blocks": 131072,
                       "name": "nvme1",
                       "block_size": 512
                       }],
            'scsi_devices': [],
            'luns': [{"claimed": True,
                      "name": "Malloc0"}],
            'interfaces': [{"core": 0,
                            "nqn": "nqn.2018-01.org.nvmexpress.discovery",
                            "hosts": []
                            }]
        }
        alive = mock.Mock(return_value=True)
        self.vhost_driver.py.is_alive = alive
        check_error = mock.Mock(return_value=True)
        common_fun.check_for_setup_error = check_error
        # NOTE(review): the assertion message refers to "py not alive",
        # but is_alive is mocked True above — this only verifies the mock
        # has not been called yet; presumably copied from the nvmf test.
        self.assertFalse(
            mock_get_one_accelerator.called,
            "Failed to discover_accelerator if py not alive."
        )
        # Fake the VhostTgt client responses used to build the result.
        acce_client = VhostTgt(self.vhost_driver.py)
        bdevs_fake = [{"num_blocks": 131072,
                       "name": "nvme1",
                       "block_size": 512
                       }]
        bdev_list = mock.Mock(return_value=bdevs_fake)
        acce_client.get_bdevs = bdev_list
        scsi_devices_fake = []
        scsi_device_list = mock.Mock(return_value=scsi_devices_fake)
        acce_client.get_scsi_devices = scsi_device_list
        luns_fake = [{"claimed": True,
                      "name": "Malloc0"}]
        lun_list = mock.Mock(return_value=luns_fake)
        acce_client.get_luns = lun_list
        interfaces_fake = \
            [{"core": 0,
              "nqn": "nqn.2018-01.org.nvmexpress.discovery",
              "hosts": []
              }]
        interface_list = mock.Mock(return_value=interfaces_fake)
        acce_client.get_interfaces = interface_list
        accelerator_fake = {
            'server': self.vhost_driver.SERVER,
            'bdevs': acce_client.get_bdevs(),
            'scsi_devices': acce_client.get_scsi_devices(),
            'luns': acce_client.get_luns(),
            'interfaces': acce_client.get_interfaces()
        }
        success_send = mock.Mock(return_value=accelerator_fake)
        self.vhost_driver.get_one_accelerator = success_send
        accelerator = self.vhost_driver.discover_accelerator()
        self.assertEqual(accelerator, expect_accelerator)

    def test_accelerator_list(self):
        """accelerator_list() should pass through get_all_accelerators()."""
        expect_accelerators = [{
            'server': 'vhost',
            'bdevs': [{"num_blocks": 131072,
                       "name": "nvme1",
                       "block_size": 512
                       }],
            'scsi_devices': [],
            'luns': [{"claimed": True,
                      "name": "Malloc0"}],
            'interfaces': [{"core": 0,
                            "nqn": "nqn.2018-01.org.nvmexpress.discovery",
                            "hosts": []
                            }]
        },
            {
                'server': 'vhost_tgt',
                'bdevs': [{"num_blocks": 131072,
                           "name": "nvme1",
                           "block_size": 512
                           }],
                'scsi_devices': [],
                'luns': [{"claimed": True,
                          "name": "Malloc0"}],
                'interfaces': [{"core": 0,
                                "nqn": "nqn.2018-01.org.nvmexpress.discovery",
                                "hosts": []
                                }]
            }
        ]
        success_send = mock.Mock(return_value=expect_accelerators)
        self.vhost_driver.get_all_accelerators = success_send
        self.assertEqual(self.vhost_driver.accelerator_list(),
                         expect_accelerators)

    # The remaining driver operations are not yet implemented; the empty
    # tests document the intended coverage surface.
    def test_install_accelerator(self):
        pass

    def test_uninstall_accelerator(self):
        pass

    def test_update(self):
        pass

    def test_attach_instance(self):
        pass

    def test_detach_instance(self):
        pass

    def test_delete_ip_address(self):
        pass

    def test_add_ip_address(self):
        pass
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/test_resource_tracker.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/test_resource_tracker.py
new file mode 100644
index 0000000..8f277c0
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/test_resource_tracker.py
@@ -0,0 +1,91 @@
+# Copyright (c) 2018 Intel.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""Cyborg agent resource_tracker test cases."""
+
+import os
+
+import fixtures
+
+from cyborg.accelerator.drivers.fpga import utils
+from cyborg.accelerator.drivers.fpga.intel import sysinfo
+from cyborg.agent.resource_tracker import ResourceTracker
+from cyborg.conductor import rpcapi as cond_api
+from cyborg.conf import CONF
+from cyborg.tests import base
+from cyborg.tests.unit.accelerator.drivers.fpga.intel import prepare_test_data
+
+
class TestResourceTracker(base.TestCase):
    """Test Agent ResourceTracker against a fake Intel FPGA sysfs tree."""

    def setUp(self):
        super(TestResourceTracker, self).setUp()
        # Save the real sysfs path, then point the sysinfo/utils modules at
        # a fake sysfs tree generated under a temporary directory.
        self.syspath = sysinfo.SYS_FPGA
        sysinfo.SYS_FPGA = "/sys/class/fpga"
        tmp_sys_dir = self.useFixture(fixtures.TempDir())
        prepare_test_data.create_fake_sysfs(tmp_sys_dir.path)
        sysinfo.SYS_FPGA = os.path.join(
            tmp_sys_dir.path, sysinfo.SYS_FPGA.split("/", 1)[-1])
        utils.SYS_FPGA_PATH = sysinfo.SYS_FPGA
        self.host = CONF.host
        self.cond_api = cond_api.ConductorAPI()
        self.rt = ResourceTracker(self.host, self.cond_api)

    def tearDown(self):
        super(TestResourceTracker, self).tearDown()
        # NOTE(review): both module globals are restored to sysinfo's saved
        # value; if utils.SYS_FPGA_PATH had a different original default,
        # this leaks the override — confirm the two defaults match.
        sysinfo.SYS_FPGA = self.syspath
        utils.SYS_FPGA_PATH = self.syspath

    def test_update_usage(self):
        """Update the resource usage and stats after a change in an
        instance
        """
        # FIXME(Shaohe Feng) need add testcase. How to check the fpgas
        # has stored into DB by conductor correctly?
        pass

    def test_get_fpga_devices(self):
        """_get_fpga_devices() should key every function (PF and VF) by its
        PCI BDF, with VFs nested under their PF as regions.
        """
        expect = {
            '0000:5e:00.0': {
                'function': 'pf', 'assignable': False, 'pr_num': '1',
                'name': 'intel-fpga-dev.0', 'vendor_id': '0x8086',
                'devices': '0000:5e:00.0',
                'regions': [{
                    'function': 'vf', 'assignable': True,
                    'name': 'intel-fpga-dev.2', 'vendor_id': '0x8086',
                    'devices': '0000:5e:00.1',
                    'parent_devices': '0000:5e:00.0',
                    'path': '%s/intel-fpga-dev.2' % sysinfo.SYS_FPGA,
                    'product_id': '0xbcc1'}],
                'parent_devices': '',
                'path': '%s/intel-fpga-dev.0' % sysinfo.SYS_FPGA,
                'product_id': '0xbcc0'},
            '0000:5e:00.1': {
                'function': 'vf', 'assignable': True,
                'name': 'intel-fpga-dev.2', 'vendor_id': '0x8086',
                'devices': '0000:5e:00.1',
                'parent_devices': '0000:5e:00.0',
                'path': '%s/intel-fpga-dev.2' % sysinfo.SYS_FPGA,
                'product_id': '0xbcc1'},
            '0000:be:00.0': {
                'function': 'pf', 'assignable': True, 'pr_num': '0',
                'name': 'intel-fpga-dev.1', 'vendor_id': '0x8086',
                'devices': '0000:be:00.0', 'parent_devices': '',
                'path': '%s/intel-fpga-dev.1' % sysinfo.SYS_FPGA,
                'product_id': '0xbcc0'}}
        fpgas = self.rt._get_fpga_devices()
        self.assertDictEqual(expect, fpgas)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/base.py
new file mode 100644
index 0000000..75578fd
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/base.py
@@ -0,0 +1,214 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Base classes for API tests."""
+
+from oslo_config import cfg
+import pecan
+import pecan.testing
+
+from cyborg.tests.unit.db import base
+
+
+cfg.CONF.import_group('keystone_authtoken', 'keystonemiddleware.auth_token')
+
+
class BaseApiTest(base.DbTestCase):
    """Pecan controller functional testing class.

    Used for functional tests of Pecan controllers where you need to
    test your literal application and its integration with the
    framework.
    """

    # Subclasses override this to version their request paths (e.g. '/v1').
    PATH_PREFIX = ''

    def setUp(self):
        super(BaseApiTest, self).setUp()
        cfg.CONF.set_override("auth_version", "v3",
                              group='keystone_authtoken')
        cfg.CONF.set_override("admin_user", "admin",
                              group='keystone_authtoken')
        self.app = self._make_app()

        # Pecan keeps global config; reset it so tests do not leak state.
        def reset_pecan():
            pecan.set_config({}, overwrite=True)

        self.addCleanup(reset_pecan)

    def _make_app(self):
        """Load a Pecan test app wired to the cyborg API controllers."""
        # Determine where we are so we can set up paths in the config
        root_dir = self.get_path()

        self.app_config = {
            'app': {
                'root': 'cyborg.api.controllers.root.RootController',
                'modules': ['cyborg.api'],
                'static_root': '%s/public' % root_dir,
                'template_path': '%s/api/templates' % root_dir,
                'acl_public_routes': ['/', '/v1/.*'],
            },
        }
        return pecan.testing.load_test_app(self.app_config)

    def _request_json(self, path, params, expect_errors=False, headers=None,
                      method="post", extra_environ=None, status=None):
        """Sends simulated HTTP request to Pecan test app.

        :param path: url path of target service
        :param params: content for wsgi.input of request
        :param expect_errors: Boolean value; whether an error is expected based
            on request
        :param headers: a dictionary of headers to send along with the request
        :param method: Request method type. Appropriate method function call
            should be used rather than passing attribute in.
        :param extra_environ: a dictionary of environ variables to send along
            with the request
        :param status: expected status code of response
        """
        # Dispatch to webtest's <method>_json helper (post_json, put_json...).
        response = getattr(self.app, "%s_json" % method)(
            str(path),
            params=params,
            headers=headers,
            status=status,
            extra_environ=extra_environ,
            expect_errors=expect_errors
        )
        return response

    def post_json(self, path, params, expect_errors=False, headers=None,
                  extra_environ=None, status=None):
        """Sends simulated HTTP POST request to Pecan test app.

        :param path: url path of target service
        :param params: content for wsgi.input of request
        :param expect_errors: Boolean value; whether an error is expected based
            on request
        :param headers: a dictionary of headers to send along with the request
        :param extra_environ: a dictionary of environ variables to send along
            with the request
        :param status: expected status code of response
        """
        full_path = self.PATH_PREFIX + path
        return self._request_json(path=full_path, params=params,
                                  expect_errors=expect_errors,
                                  headers=headers, extra_environ=extra_environ,
                                  status=status, method="post")

    def gen_headers(self, context, **kw):
        """Generate a header for a simulated HTTP request to Pecan test app.

        :param context: context that store the client user information.
        :param kw: key word aguments, used to overwrite the context attribute.

        note: "is_public_api" is not in headers, it should be in environ
        variables to send along with the request. We can pass it by
        extra_environ when we call delete, get_json or other method request.
        """
        ct = context.to_dict()
        ct.update(kw)
        # Fall back to fixed fake identities for any attribute the context
        # does not carry.
        headers = {
            'X-User-Name': ct.get("user_name") or "user",
            'X-User-Id':
                ct.get("user") or "1d6d686bc2c949ddb685ffb4682e0047",
            'X-Project-Name': ct.get("project_name") or "project",
            'X-Project-Id':
                ct.get("tenant") or "86f64f561b6d4f479655384572727f70",
            'X-User-Domain-Id':
                ct.get("domain_id") or "bd5eeb7d0fb046daaf694b36f4df5518",
            'X-User-Domain-Name': ct.get("domain_name") or "no_domain",
            'X-Auth-Token':
                ct.get("auth_token") or "b9764005b8c145bf972634fb16a826e8",
            'X-Roles': ct.get("roles") or "cyborg"
        }

        return headers

    def get_json(self, path, expect_errors=False, headers=None,
                 extra_environ=None, q=None, **params):
        """Sends simulated HTTP GET request to Pecan test app.

        :param path: url path of target service
        :param expect_errors: Boolean value; whether an error is expected based
            on request
        :param headers: a dictionary of headers to send along with the request
        :param extra_environ: a dictionary of environ variables to send along
            with the request
        :param q: list of queries consisting of: field, value, op, and type
            keys
        :param path_prefix: prefix of the url path
        :param params: content for wsgi.input of request
        """
        full_path = self.PATH_PREFIX + path
        q = q or []
        # Flatten the query dicts into the q.field/q.value/q.op triples the
        # API's query filter expects.
        query_params = {
            'q.field': [],
            'q.value': [],
            'q.op': [],
        }
        for query in q:
            for name in ['field', 'op', 'value']:
                query_params['q.%s' % name].append(query.get(name, ''))
        all_params = {}
        all_params.update(params)
        if q:
            all_params.update(query_params)
        response = self.app.get(full_path,
                                params=all_params,
                                headers=headers,
                                extra_environ=extra_environ,
                                expect_errors=expect_errors)
        # On the success path callers get the decoded JSON body; on the
        # error path they get the raw response to inspect status/headers.
        if not expect_errors:
            response = response.json
        return response

    def patch_json(self, path, params, expect_errors=False, headers=None,
                   extra_environ=None, status=None):
        """Sends simulated HTTP PATCH request to Pecan test app.

        :param path: url path of target service
        :param params: content for wsgi.input of request
        :param expect_errors: Boolean value; whether an error is expected based
            on request
        :param headers: a dictionary of headers to send along with the request
        :param extra_environ: a dictionary of environ variables to send along
            with the request
        :param status: expected status code of response
        """
        full_path = self.PATH_PREFIX + path
        # NOTE(review): despite the name and docstring, this sends the
        # request with method="put" (webtest put_json). Confirm whether the
        # API routes updates via PUT or this should be method="patch".
        return self._request_json(path=full_path, params=params,
                                  expect_errors=expect_errors,
                                  headers=headers, extra_environ=extra_environ,
                                  status=status, method="put")

    def delete(self, path, expect_errors=False, headers=None,
               extra_environ=None, status=None):
        """Sends simulated HTTP DELETE request to Pecan test app.

        :param path: url path of target service
        :param expect_errors: Boolean value; whether an error is expected based
            on request
        :param headers: a dictionary of headers to send along with the request
        :param extra_environ: a dictionary of environ variables to send along
            with the request
        :param status: expected status code of response
        """
        full_path = self.PATH_PREFIX + path
        response = self.app.delete(full_path,
                                   headers=headers,
                                   status=status,
                                   extra_environ=extra_environ,
                                   expect_errors=expect_errors)
        return response
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/base.py
new file mode 100644
index 0000000..c16eaee
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/base.py
@@ -0,0 +1,21 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from cyborg.tests.unit.api import base
+
+
+class APITestV1(base.BaseApiTest):
+
+ PATH_PREFIX = '/v1'
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/test_accelerators.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/test_accelerators.py
new file mode 100644
index 0000000..9f606a4
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/test_accelerators.py
@@ -0,0 +1,174 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import mock
+from oslo_utils import timeutils
+from six.moves import http_client
+
+from cyborg.conductor import rpcapi
+from cyborg.tests.unit.api.controllers.v1 import base as v1_test
+from cyborg.tests.unit.db import utils as db_utils
+from cyborg.tests.unit.objects import utils as obj_utils
+
+def gen_post_body(**kw):
+ return db_utils.get_test_accelerator(**kw)
+
+
+def _rpcapi_accelerator_create(context, obj_acc):
+ """Fake used to mock out the conductor RPCAPI's accelerator_create method.
+
+ Performs creation of the accelerator object and returns the created
+ accelerator as-per the real method.
+ """
+ obj_acc.create(context)
+ return obj_acc
+
+
+
+class TestPost(v1_test.APITestV1):
+
+ ACCELERATOR_UUID = '10efe63d-dfea-4a37-ad94-4116fba50981'
+
+ def setUp(self):
+ super(TestPost, self).setUp()
+ self.headers = self.gen_headers(self.context)
+
+ p = mock.patch.object(rpcapi.ConductorAPI, 'accelerator_create')
+ self.mock_create = p.start()
+ self.mock_create.side_effect = _rpcapi_accelerator_create
+ self.addCleanup(p.stop)
+
+ @mock.patch('oslo_utils.uuidutils.generate_uuid')
+ def test_post(self, mock_uuid):
+ mock_uuid.return_value = self.ACCELERATOR_UUID
+
+ body = gen_post_body(name='post_accelerator')
+ response = self.post_json('/accelerators', body, headers=self.headers)
+ self.assertEqual(http_client.CREATED, response.status_int)
+ response = response.json
+ self.assertEqual(self.ACCELERATOR_UUID, response['uuid'])
+ self.assertEqual(body['name'], response['name'])
+        self.mock_create.assert_called_once_with(mock.ANY, mock.ANY)
+
+
+class TestList(v1_test.APITestV1):
+
+ def setUp(self):
+ super(TestList, self).setUp()
+ self.accs = []
+ for i in range(3):
+ acc = obj_utils.create_test_accelerator(self.context)
+ self.accs.append(acc)
+ self.acc = self.accs[0]
+ self.context.tenant = self.acc.project_id
+ self.headers = self.gen_headers(self.context)
+
+ def test_get_one(self):
+ data = self.get_json('/accelerators/%s' % self.acc.uuid,
+ headers=self.headers)
+ self.assertEqual(self.acc.uuid, data['uuid'])
+ self.assertIn('acc_capability', data)
+ self.assertIn('acc_type', data)
+ self.assertIn('created_at', data)
+ self.assertIn('description', data)
+ self.assertIn('device_type', data)
+ self.assertIn('links', data)
+ self.assertIn('name', data)
+ self.assertIn('product_id', data)
+ self.assertIn('project_id', data)
+ self.assertIn('remotable', data)
+ self.assertIn('updated_at', data)
+ self.assertIn('user_id', data)
+ self.assertIn('vendor_id', data)
+
+ def test_get_all(self):
+ data = self.get_json('/accelerators', headers=self.headers)
+ self.assertEqual(3, len(data['accelerators']))
+ data_uuids = [d['uuid'] for d in data['accelerators']]
+ acc_uuids = [acc.uuid for acc in self.accs]
+ self.assertItemsEqual(acc_uuids, data_uuids)
+
+
+def _rpcapi_accelerator_update(context, obj_acc):
+ """Fake used to mock out the conductor RPCAPI's accelerator_update method.
+
+ Performs update of the accelerator object and returns the updated
+ accelerator as-per the real method.
+ """
+ obj_acc.save(context)
+ return obj_acc
+
+
+class TestPut(v1_test.APITestV1):
+
+ def setUp(self):
+ super(TestPut, self).setUp()
+ self.acc = obj_utils.create_test_accelerator(self.context)
+ self.context.tenant = self.acc.project_id
+ self.headers = self.gen_headers(self.context)
+
+ p = mock.patch.object(rpcapi.ConductorAPI, 'accelerator_update')
+ self.mock_update = p.start()
+ self.mock_update.side_effect = _rpcapi_accelerator_update
+ self.addCleanup(p.stop)
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def test_put(self, mock_utcnow):
+ test_time = datetime.datetime(2012, 12, 12, 12, 12)
+ mock_utcnow.return_value = test_time
+
+ description = 'new-description'
+ response = self.patch_json('/accelerators/%s' % self.acc.uuid,
+ [{'path': '/description',
+ 'value': description,
+ 'op': 'replace'}],
+ headers=self.headers)
+ self.assertEqual(http_client.OK, response.status_code)
+ data = self.get_json('/accelerators/%s' % self.acc.uuid,
+ headers=self.headers)
+ self.assertEqual(description, data['description'])
+ return_updated_at = timeutils.parse_isotime(
+ data['updated_at']).replace(tzinfo=None)
+ self.assertEqual(test_time, return_updated_at)
+ self.mock_update.assert_called_once_with(mock.ANY, mock.ANY)
+
+
+def _rpcapi_accelerator_delete(context, obj_acc):
+ """Fake used to mock out the conductor RPCAPI's accelerator_delete method.
+
+ Performs deletion of the accelerator object as-per the real method.
+ """
+ obj_acc.destroy(context)
+
+
+class TestDelete(v1_test.APITestV1):
+
+ def setUp(self):
+ super(TestDelete, self).setUp()
+ self.acc = obj_utils.create_test_accelerator(self.context)
+ self.context.tenant = self.acc.project_id
+ self.headers = self.gen_headers(self.context)
+
+ p = mock.patch.object(rpcapi.ConductorAPI, 'accelerator_delete')
+ self.mock_delete = p.start()
+ self.mock_delete.side_effect = _rpcapi_accelerator_delete
+ self.addCleanup(p.stop)
+
+ def test_delete(self):
+ response = self.delete('/accelerators/%s' % self.acc.uuid,
+ headers=self.headers)
+ self.assertEqual(http_client.NO_CONTENT, response.status_code)
+ self.mock_delete.assert_called_once_with(mock.ANY, mock.ANY)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/base.py
new file mode 100644
index 0000000..1d40ce0
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/base.py
@@ -0,0 +1,71 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Cyborg DB test base class."""
+
+import fixtures
+from oslo_config import cfg
+from oslo_db.sqlalchemy import enginefacade
+
+from cyborg.db import api as dbapi
+from cyborg.db.sqlalchemy import migration
+from cyborg.db.sqlalchemy import models
+from cyborg.tests import base
+
+
+CONF = cfg.CONF
+_DB_CACHE = None
+
+
+class Database(fixtures.Fixture):
+
+ def __init__(self, engine, db_migrate, sql_connection):
+ self.sql_connection = sql_connection
+
+ self.engine = engine
+ self.engine.dispose()
+ conn = self.engine.connect()
+ self.setup_sqlite(db_migrate)
+
+ self._DB = ''.join(line for line in conn.connection.iterdump())
+ self.engine.dispose()
+
+ def setup_sqlite(self, db_migrate):
+ if db_migrate.version():
+ return
+ models.Base.metadata.create_all(self.engine)
+ db_migrate.stamp('head')
+
+ def setUp(self):
+ super(Database, self).setUp()
+
+ conn = self.engine.connect()
+ conn.connection.executescript(self._DB)
+ self.addCleanup(self.engine.dispose)
+
+
+class DbTestCase(base.TestCase):
+
+ def setUp(self):
+ super(DbTestCase, self).setUp()
+
+ self.dbapi = dbapi.get_instance()
+
+ global _DB_CACHE
+ if not _DB_CACHE:
+ engine = enginefacade.get_legacy_facade().get_engine()
+ _DB_CACHE = Database(engine, migration,
+ sql_connection=CONF.database.connection)
+ self.useFixture(_DB_CACHE)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/utils.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/utils.py
new file mode 100644
index 0000000..8290af1
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/utils.py
@@ -0,0 +1,31 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Cyborg db test utilities."""
+
+
+def get_test_accelerator(**kw):
+ return {
+ 'name': kw.get('name', 'name'),
+ 'description': kw.get('description', 'description'),
+ 'device_type': kw.get('device_type', 'device_type'),
+ 'acc_type': kw.get('acc_type', 'acc_type'),
+ 'acc_capability': kw.get('acc_capability', 'acc_capability'),
+ 'vendor_id': kw.get('vendor_id', 'vendor_id'),
+ 'product_id': kw.get('product_id', 'product_id'),
+ 'remotable': kw.get('remotable', 1),
+ 'project_id': kw.get('project_id', 'b492a6fb12964ae3bd291ce585107d48'),
+ 'user_id': kw.get('user_id', '7009409e21614d1db1ef7a8c5ee101d8'),
+ }
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_accelerator.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_accelerator.py
new file mode 100644
index 0000000..da592e9
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_accelerator.py
@@ -0,0 +1,66 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_serialization import jsonutils
+from oslo_utils import uuidutils
+
+from cyborg import objects
+from cyborg.objects import fields
+
+
+def fake_db_accelerator(**updates):
+ db_accelerator = {
+ 'id': 1,
+ 'deleted': False,
+ 'uuid': uuidutils.generate_uuid(),
+ 'name': 'fake-name',
+ 'description': 'fake-desc',
+ 'project_id': 'fake-pid',
+ 'user_id': 'fake-uid',
+ 'device_type': 'fake-dtype',
+ 'acc_type': 'fake-acc_type',
+ 'acc_capability': 'fake-cap',
+ 'vendor_id': 'fake-vid',
+ 'product_id': 'fake-pid',
+ 'remotable': 0
+ }
+
+ for name, field in objects.Accelerator.fields.items():
+ if name in db_accelerator:
+ continue
+ if field.nullable:
+ db_accelerator[name] = None
+ elif field.default != fields.UnspecifiedDefault:
+ db_accelerator[name] = field.default
+ else:
+ raise Exception('fake_db_accelerator needs help with %s' % name)
+
+ if updates:
+ db_accelerator.update(updates)
+
+ return db_accelerator
+
+
+def fake_accelerator_obj(context, obj_accelerator_class=None, **updates):
+ if obj_accelerator_class is None:
+ obj_accelerator_class = objects.Accelerator
+ expected_attrs = updates.pop('expected_attrs', None)
+    acc = obj_accelerator_class._from_db_object(context,
+                                                obj_accelerator_class(),
+                                                fake_db_accelerator(**updates),
+                                                expected_attrs=expected_attrs)
+ acc.obj_reset_changes()
+ return acc \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_deployable.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_deployable.py
new file mode 100644
index 0000000..0f1c8c8
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_deployable.py
@@ -0,0 +1,70 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_serialization import jsonutils
+from oslo_utils import uuidutils
+
+from cyborg import objects
+from cyborg.objects import fields
+
+
+def fake_db_deployable(**updates):
+ root_uuid = uuidutils.generate_uuid()
+ db_deployable = {
+ 'id': 1,
+ 'deleted': False,
+ 'uuid': root_uuid,
+ 'name': 'dp_name',
+ 'parent_uuid': None,
+ 'root_uuid': root_uuid,
+ 'pcie_address': '00:7f:0b.2',
+ 'host': 'host_name',
+ 'board': 'KU115',
+ 'vendor': 'Xilinx',
+ 'version': '1.0',
+ 'type': 'pf',
+ 'assignable': True,
+ 'instance_uuid': None,
+ 'availability': 'Available',
+ 'accelerator_id': 1
+ }
+
+ for name, field in objects.Deployable.fields.items():
+ if name in db_deployable:
+ continue
+ if field.nullable:
+ db_deployable[name] = None
+ elif field.default != fields.UnspecifiedDefault:
+ db_deployable[name] = field.default
+ else:
+ raise Exception('fake_db_deployable needs help with %s' % name)
+
+ if updates:
+ db_deployable.update(updates)
+
+ return db_deployable
+
+
+def fake_deployable_obj(context, obj_dpl_class=None, **updates):
+ if obj_dpl_class is None:
+ obj_dpl_class = objects.Deployable
+ expected_attrs = updates.pop('expected_attrs', None)
+ deploy = obj_dpl_class._from_db_object(context,
+ obj_dpl_class(),
+ fake_db_deployable(**updates),
+ expected_attrs=expected_attrs)
+ deploy.obj_reset_changes()
+ return deploy \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_physical_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_physical_function.py
new file mode 100644
index 0000000..4a2bbb7
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_physical_function.py
@@ -0,0 +1,72 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_serialization import jsonutils
+from oslo_utils import uuidutils
+
+from cyborg import objects
+from cyborg.objects import fields
+from cyborg.objects import physical_function
+
+
+def fake_db_physical_function(**updates):
+ root_uuid = uuidutils.generate_uuid()
+ db_physical_function = {
+ 'id': 1,
+ 'deleted': False,
+ 'uuid': root_uuid,
+ 'name': 'dp_name',
+ 'parent_uuid': None,
+ 'root_uuid': root_uuid,
+ 'pcie_address': '00:7f:0b.2',
+ 'host': 'host_name',
+ 'board': 'KU115',
+ 'vendor': 'Xilinx',
+ 'version': '1.0',
+ 'type': 'pf',
+ 'assignable': True,
+ 'instance_uuid': None,
+ 'availability': 'Available',
+ 'accelerator_id': 1
+ }
+
+ for name, field in physical_function.PhysicalFunction.fields.items():
+ if name in db_physical_function:
+ continue
+ if field.nullable:
+ db_physical_function[name] = None
+ elif field.default != fields.UnspecifiedDefault:
+ db_physical_function[name] = field.default
+ else:
+ raise Exception('fake_db_physical_function needs help with %s'
+ % name)
+
+ if updates:
+ db_physical_function.update(updates)
+
+ return db_physical_function
+
+
+def fake_physical_function_obj(context, obj_pf_class=None, **updates):
+ if obj_pf_class is None:
+ obj_pf_class = objects.VirtualFunction
+ expected_attrs = updates.pop('expected_attrs', None)
+ pf = obj_pf_class._from_db_object(context,
+ obj_pf_class(),
+ fake_db_physical_function(**updates),
+ expected_attrs=expected_attrs)
+ pf.obj_reset_changes()
+ return pf
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_virtual_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_virtual_function.py
new file mode 100644
index 0000000..8184b0f
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_virtual_function.py
@@ -0,0 +1,72 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_serialization import jsonutils
+from oslo_utils import uuidutils
+
+from cyborg import objects
+from cyborg.objects import fields
+from cyborg.objects import virtual_function
+
+
+def fake_db_virtual_function(**updates):
+ root_uuid = uuidutils.generate_uuid()
+ db_virtual_function = {
+ 'id': 1,
+ 'deleted': False,
+ 'uuid': root_uuid,
+ 'name': 'dp_name',
+ 'parent_uuid': None,
+ 'root_uuid': root_uuid,
+ 'pcie_address': '00:7f:bb.2',
+ 'host': 'host_name',
+ 'board': 'KU115',
+ 'vendor': 'Xilinx',
+ 'version': '1.0',
+ 'type': 'vf',
+ 'assignable': True,
+ 'instance_uuid': None,
+ 'availability': 'Available',
+ 'accelerator_id': 1
+ }
+
+ for name, field in virtual_function.VirtualFunction.fields.items():
+ if name in db_virtual_function:
+ continue
+ if field.nullable:
+ db_virtual_function[name] = None
+ elif field.default != fields.UnspecifiedDefault:
+ db_virtual_function[name] = field.default
+ else:
+ raise Exception('fake_db_virtual_function needs help with %s'
+ % name)
+
+ if updates:
+ db_virtual_function.update(updates)
+
+ return db_virtual_function
+
+
+def fake_virtual_function_obj(context, obj_vf_class=None, **updates):
+ if obj_vf_class is None:
+ obj_vf_class = objects.VirtualFunction
+ expected_attrs = updates.pop('expected_attrs', None)
+ vf = obj_vf_class._from_db_object(context,
+ obj_vf_class(),
+ fake_db_virtual_function(**updates),
+ expected_attrs=expected_attrs)
+ vf.obj_reset_changes()
+ return vf
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_accelerator.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_accelerator.py
new file mode 100644
index 0000000..1141d8c
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_accelerator.py
@@ -0,0 +1,104 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+import mock
+import netaddr
+from oslo_db import exception as db_exc
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+from oslo_context import context
+
+from cyborg import db
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg import tests as test
+from cyborg.tests.unit import fake_accelerator
+from cyborg.tests.unit.objects import test_objects
+from cyborg.tests.unit.db.base import DbTestCase
+
+
+class _TestAcceleratorObject(DbTestCase):
+ @property
+ def fake_accelerator(self):
+ db_acc = fake_accelerator.fake_db_accelerator(id=2)
+ return db_acc
+
+ @mock.patch.object(db.api.Connection, 'accelerator_create')
+ def test_create(self, mock_create):
+ mock_create.return_value = self.fake_accelerator
+ acc = objects.Accelerator(context=self.context,
+ **mock_create.return_value)
+ acc.create(self.context)
+
+ self.assertEqual(self.fake_accelerator['id'], acc.id)
+
+ @mock.patch.object(db.api.Connection, 'accelerator_get')
+ def test_get(self, mock_get):
+ mock_get.return_value = self.fake_accelerator
+ acc = objects.Accelerator(context=self.context,
+ **mock_get.return_value)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc['uuid'])
+ self.assertEqual(acc_get.uuid, acc.uuid)
+
+ @mock.patch.object(db.api.Connection, 'accelerator_update')
+ def test_save(self, mock_save):
+ mock_save.return_value = self.fake_accelerator
+ acc = objects.Accelerator(context=self.context,
+ **mock_save.return_value)
+ acc.create(self.context)
+ acc.name = 'test_save'
+ acc.save(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc['uuid'])
+ self.assertEqual(acc_get.name, 'test_save')
+
+ @mock.patch.object(db.api.Connection, 'accelerator_delete')
+ def test_destroy(self, mock_destroy):
+ mock_destroy.return_value = self.fake_accelerator
+ acc = objects.Accelerator(context=self.context,
+ **mock_destroy.return_value)
+ acc.create(self.context)
+ self.assertEqual(self.fake_accelerator['id'], acc.id)
+ acc.destroy(self.context)
+ self.assertRaises(exception.AcceleratorNotFound,
+ objects.Accelerator.get, self.context,
+ acc['uuid'])
+
+
+class TestAcceleratorObject(test_objects._LocalTest,
+ _TestAcceleratorObject):
+ def _test_save_objectfield_fk_constraint_fails(self, foreign_key,
+ expected_exception):
+
+ error = db_exc.DBReferenceError('table', 'constraint', foreign_key,
+ 'key_table')
+ # Prevent lazy-loading any fields, results in InstanceNotFound
+ accelerator = fake_accelerator.fake_accelerator_obj(self.context)
+ fields_with_save_methods = [field for field in accelerator.fields
+ if hasattr(accelerator,
+ '_save_%s' % field)]
+ for field in fields_with_save_methods:
+ @mock.patch.object(accelerator, '_save_%s' % field)
+ @mock.patch.object(accelerator, 'obj_attr_is_set')
+ def _test(mock_is_set, mock_save_field):
+ mock_is_set.return_value = True
+ mock_save_field.side_effect = error
+ accelerator.obj_reset_changes(fields=[field])
+ accelerator._changed_fields.add(field)
+ self.assertRaises(expected_exception, accelerator.save)
+ accelerator.obj_reset_changes(fields=[field])
+ _test() \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_deployable.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_deployable.py
new file mode 100644
index 0000000..fe3c6fc
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_deployable.py
@@ -0,0 +1,151 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+import mock
+import netaddr
+from oslo_db import exception as db_exc
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+from oslo_context import context
+
+from cyborg import db
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg import tests as test
+from cyborg.tests.unit import fake_accelerator
+from cyborg.tests.unit import fake_deployable
+from cyborg.tests.unit.objects import test_objects
+from cyborg.tests.unit.db.base import DbTestCase
+
+
+class _TestDeployableObject(DbTestCase):
+ @property
+ def fake_deployable(self):
+ db_deploy = fake_deployable.fake_db_deployable(id=2)
+ return db_deploy
+
+ @property
+ def fake_accelerator(self):
+        db_acc = fake_accelerator.fake_db_accelerator(id=2)
+ return db_acc
+
+    def test_create(self):
+ db_acc = self.fake_accelerator
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+
+ db_dpl = self.fake_deployable
+ dpl = objects.Deployable(context=self.context,
+ **db_dpl)
+ dpl.accelerator_id = acc_get.id
+ dpl.create(self.context)
+ self.assertEqual(db_dpl['uuid'], dpl.uuid)
+
+
+
+ def test_get(self):
+ db_acc = self.fake_accelerator
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ db_dpl = self.fake_deployable
+ dpl = objects.Deployable(context=self.context,
+ **db_dpl)
+
+ dpl.accelerator_id = acc_get.id
+ dpl.create(self.context)
+ dpl_get = objects.Deployable.get(self.context, dpl.uuid)
+ self.assertEqual(dpl_get.uuid, dpl.uuid)
+
+ def test_get_by_filter(self):
+ db_acc = self.fake_accelerator
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ db_dpl = self.fake_deployable
+ dpl = objects.Deployable(context=self.context,
+ **db_dpl)
+
+ dpl.accelerator_id = acc_get.id
+ dpl.create(self.context)
+ query = {"uuid": dpl['uuid']}
+ dpl_get_list = objects.Deployable.get_by_filter(self.context, query)
+
+ self.assertEqual(dpl_get_list[0].uuid, dpl.uuid)
+
+ def test_save(self):
+ db_acc = self.fake_accelerator
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ db_dpl = self.fake_deployable
+ dpl = objects.Deployable(context=self.context,
+ **db_dpl)
+
+ dpl.accelerator_id = acc_get.id
+ dpl.create(self.context)
+ dpl.host = 'test_save'
+ dpl.save(self.context)
+ dpl_get = objects.Deployable.get(self.context, dpl.uuid)
+ self.assertEqual(dpl_get.host, 'test_save')
+
+ def test_destroy(self):
+ db_acc = self.fake_accelerator
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ db_dpl = self.fake_deployable
+ dpl = objects.Deployable(context=self.context,
+ **db_dpl)
+
+ dpl.accelerator_id = acc_get.id
+ dpl.create(self.context)
+ self.assertEqual(db_dpl['uuid'], dpl.uuid)
+ dpl.destroy(self.context)
+ self.assertRaises(exception.DeployableNotFound,
+ objects.Deployable.get, self.context,
+ dpl.uuid)
+
+
+class TestDeployableObject(test_objects._LocalTest,
+ _TestDeployableObject):
+ def _test_save_objectfield_fk_constraint_fails(self, foreign_key,
+ expected_exception):
+
+ error = db_exc.DBReferenceError('table', 'constraint', foreign_key,
+ 'key_table')
+ # Prevent lazy-loading any fields, results in InstanceNotFound
+ deployable = fake_deployable.fake_deployable_obj(self.context)
+ fields_with_save_methods = [field for field in deployable.fields
+ if hasattr(deployable, '_save_%s' % field)]
+ for field in fields_with_save_methods:
+ @mock.patch.object(deployable, '_save_%s' % field)
+ @mock.patch.object(deployable, 'obj_attr_is_set')
+ def _test(mock_is_set, mock_save_field):
+ mock_is_set.return_value = True
+ mock_save_field.side_effect = error
+ deployable.obj_reset_changes(fields=[field])
+ deployable._changed_fields.add(field)
+ self.assertRaises(expected_exception, deployable.save)
+ deployable.obj_reset_changes(fields=[field])
+ _test() \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_object.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_object.py
new file mode 100644
index 0000000..35b574d
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_object.py
@@ -0,0 +1,226 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import contextlib
+import copy
+import datetime
+import inspect
+import os
+import pprint
+
+import fixtures
+import mock
+from oslo_log import log
+from oslo_utils import timeutils
+from oslo_versionedobjects import base as ovo_base
+from oslo_versionedobjects import exception as ovo_exc
+from oslo_versionedobjects import fixture
+import six
+
+from oslo_context import context
+
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg.objects import fields
+from cyborg import tests as test
+
+
+LOG = log.getLogger(__name__)
+
+
+class MyOwnedObject(base.CyborgPersistentObject, base.CyborgObject):
+    """Minimal owned object used as a relation target in MyObj tests."""
+    VERSION = '1.0'
+    fields = {'baz': fields.IntegerField()}
+
+
+class MyObj(base.CyborgPersistentObject, base.CyborgObject,
+ base.CyborgObjectDictCompat):
+ VERSION = '1.6'
+ fields = {'foo': fields.IntegerField(default=1),
+ 'bar': fields.StringField(),
+ 'missing': fields.StringField(),
+ 'readonly': fields.IntegerField(read_only=True),
+ 'rel_object': fields.ObjectField('MyOwnedObject', nullable=True),
+ 'rel_objects': fields.ListOfObjectsField('MyOwnedObject',
+ nullable=True),
+ 'mutable_default': fields.ListOfStringsField(default=[]),
+ }
+
+ @staticmethod
+ def _from_db_object(context, obj, db_obj):
+ self = MyObj()
+ self.foo = db_obj['foo']
+ self.bar = db_obj['bar']
+ self.missing = db_obj['missing']
+ self.readonly = 1
+ self._context = context
+ return self
+
+ def obj_load_attr(self, attrname):
+ setattr(self, attrname, 'loaded!')
+
+ def query(cls, context):
+ obj = cls(context=context, foo=1, bar='bar')
+ obj.obj_reset_changes()
+ return obj
+
+ def marco(self):
+ return 'polo'
+
+ def _update_test(self):
+ self.bar = 'updated'
+
+ def save(self):
+ self.obj_reset_changes()
+
+ def refresh(self):
+ self.foo = 321
+ self.bar = 'refreshed'
+ self.obj_reset_changes()
+
+ def modify_save_modify(self):
+ self.bar = 'meow'
+ self.save()
+ self.foo = 42
+ self.rel_object = MyOwnedObject(baz=42)
+
+ def obj_make_compatible(self, primitive, target_version):
+ super(MyObj, self).obj_make_compatible(primitive, target_version)
+ # NOTE(danms): Simulate an older version that had a different
+ # format for the 'bar' attribute
+ if target_version == '1.1' and 'bar' in primitive:
+ primitive['bar'] = 'old%s' % primitive['bar']
+
+
+class RandomMixInWithNoFields(object):
+    """Used to test object inheritance using a mixin that has no fields."""
+    # Intentionally empty: contributes no 'fields' dict of its own.
+    pass
+
+
+@base.CyborgObjectRegistry.register_if(False)
+class TestSubclassedObject(RandomMixInWithNoFields, MyObj):
+    """Unregistered MyObj subclass adding one field via a no-field mixin."""
+    fields = {'new_field': fields.StringField()}
+
+
+class TestObjToPrimitive(test.base.TestCase):
+    """Checks for base.obj_to_primitive over lists, dicts and IP fields."""
+
+    def test_obj_to_primitive_list(self):
+        """A list-of-objects field flattens to a list of primitives."""
+        @base.CyborgObjectRegistry.register_if(False)
+        class MyObjElement(base.CyborgObject):
+            fields = {'foo': fields.IntegerField()}
+
+            def __init__(self, foo):
+                super(MyObjElement, self).__init__()
+                self.foo = foo
+
+        @base.CyborgObjectRegistry.register_if(False)
+        class MyList(base.ObjectListBase, base.CyborgObject):
+            fields = {'objects': fields.ListOfObjectsField('MyObjElement')}
+
+        mylist = MyList()
+        mylist.objects = [MyObjElement(1), MyObjElement(2), MyObjElement(3)]
+        self.assertEqual([1, 2, 3],
+                         [x['foo'] for x in base.obj_to_primitive(mylist)])
+
+    def test_obj_to_primitive_dict(self):
+        """Only the set fields appear in the primitive dict."""
+        base.CyborgObjectRegistry.register(MyObj)
+        myobj = MyObj(foo=1, bar='foo')
+        self.assertEqual({'foo': 1, 'bar': 'foo'},
+                         base.obj_to_primitive(myobj))
+
+    def test_obj_to_primitive_recursive(self):
+        """Nested objects are converted recursively."""
+        base.CyborgObjectRegistry.register(MyObj)
+
+        class MyList(base.ObjectListBase, base.CyborgObject):
+            fields = {'objects': fields.ListOfObjectsField('MyObj')}
+
+        mylist = MyList(objects=[MyObj(), MyObj()])
+        for i, value in enumerate(mylist):
+            value.foo = i
+        self.assertEqual([{'foo': 0}, {'foo': 1}],
+                         base.obj_to_primitive(mylist))
+
+    def test_obj_to_primitive_with_ip_addr(self):
+        """IPAddress/IPNetwork fields serialize to their string forms."""
+        @base.CyborgObjectRegistry.register_if(False)
+        class TestObject(base.CyborgObject):
+            fields = {'addr': fields.IPAddressField(),
+                      'cidr': fields.IPNetworkField()}
+
+        obj = TestObject(addr='1.2.3.4', cidr='1.1.1.1/16')
+        self.assertEqual({'addr': '1.2.3.4', 'cidr': '1.1.1.1/16'},
+                         base.obj_to_primitive(obj))
+
+
+def compare_obj(test, obj, db_obj, subs=None, allow_missing=None,
+ comparators=None):
+ """Compare a CyborgObject and a dict-like database object.
+ This automatically converts TZ-aware datetimes and iterates over
+ the fields of the object.
+ :param:test: The TestCase doing the comparison
+ :param:obj: The CyborgObject to examine
+ :param:db_obj: The dict-like database object to use as reference
+ :param:subs: A dict of objkey=dbkey field substitutions
+ :param:allow_missing: A list of fields that may not be in db_obj
+ :param:comparators: Map of comparator functions to use for certain fields
+ """
+
+ if subs is None:
+ subs = {}
+ if allow_missing is None:
+ allow_missing = []
+ if comparators is None:
+ comparators = {}
+
+ for key in obj.fields:
+ if key in allow_missing and not obj.obj_attr_is_set(key):
+ continue
+ obj_val = getattr(obj, key)
+ db_key = subs.get(key, key)
+ db_val = db_obj[db_key]
+ if isinstance(obj_val, datetime.datetime):
+ obj_val = obj_val.replace(tzinfo=None)
+
+ if key in comparators:
+ comparator = comparators[key]
+ comparator(db_val, obj_val)
+ else:
+ test.assertEqual(db_val, obj_val)
+
+
+class _BaseTestCase(test.base.TestCase):
+    """Shared setup: a request context plus MyObj/MyOwnedObject registered."""
+
+    def setUp(self):
+        super(_BaseTestCase, self).setUp()
+        self.user_id = 'fake-user'
+        self.project_id = 'fake-project'
+        # NOTE(review): oslo_context.RequestContext's first positional
+        # parameters may be auth_token/user rather than user_id/project_id
+        # depending on the oslo.context release -- verify the binding.
+        self.context = context.RequestContext(self.user_id, self.project_id)
+
+        base.CyborgObjectRegistry.register(MyObj)
+        base.CyborgObjectRegistry.register(MyOwnedObject)
+
+    def compare_obj(self, obj, db_obj, subs=None, allow_missing=None,
+                    comparators=None):
+        # Thin wrapper around the module-level compare_obj helper.
+        compare_obj(self, obj, db_obj, subs=subs, allow_missing=allow_missing,
+                    comparators=comparators)
+
+    def str_comparator(self, expected, obj_val):
+        """Compare an object field to a string in the db by performing
+        a simple coercion on the object field value.
+        """
+        self.assertEqual(expected, str(obj_val))
+
+
+class _LocalTest(_BaseTestCase):
+    """Variant running objects in-process (no remote indirection)."""
+
+    def setUp(self):
+ super(_LocalTest, self).setUp() \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_physical_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_physical_function.py
new file mode 100644
index 0000000..2fa2ab1
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_physical_function.py
@@ -0,0 +1,186 @@
+import mock
+import netaddr
+from oslo_db import exception as db_exc
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+from oslo_context import context
+
+from cyborg import db
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg import tests as test
+from cyborg.tests.unit import fake_physical_function
+from cyborg.tests.unit import fake_virtual_function
+from cyborg.tests.unit import fake_accelerator
+from cyborg.tests.unit.objects import test_objects
+from cyborg.tests.unit.db.base import DbTestCase
+
+
+class _TestPhysicalFunctionObject(DbTestCase):
+    """CRUD and VF-attachment tests for the PhysicalFunction object."""
+
+    @property
+    def fake_physical_function(self):
+        # Fresh fake row on every access.
+        db_pf = fake_physical_function.fake_db_physical_function(id=1)
+        return db_pf
+
+    @property
+    def fake_virtual_function(self):
+        db_vf = fake_virtual_function.fake_db_virtual_function(id=3)
+        return db_vf
+
+    @property
+    def fake_accelerator(self):
+        db_acc = fake_accelerator.fake_db_accelerator(id=2)
+        return db_acc
+
+    def test_create(self):
+        """create() persists a PF tied to an existing accelerator."""
+        db_pf = self.fake_physical_function
+        db_acc = self.fake_accelerator
+        acc = objects.Accelerator(context=self.context,
+                                  **db_acc)
+        acc.create(self.context)
+        acc_get = objects.Accelerator.get(self.context, acc.uuid)
+        pf = objects.PhysicalFunction(context=self.context,
+                                      **db_pf)
+        pf.accelerator_id = acc_get.id
+        pf.create(self.context)
+
+        self.assertEqual(db_pf['uuid'], pf.uuid)
+
+    def test_get(self):
+        """get() returns the PF previously created."""
+        db_pf = self.fake_physical_function
+        db_acc = self.fake_accelerator
+        acc = objects.Accelerator(context=self.context,
+                                  **db_acc)
+        acc.create(self.context)
+        acc_get = objects.Accelerator.get(self.context, acc.uuid)
+        pf = objects.PhysicalFunction(context=self.context,
+                                      **db_pf)
+        pf.accelerator_id = acc_get.id
+        pf.create(self.context)
+        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
+        self.assertEqual(pf_get.uuid, pf.uuid)
+
+    def test_get_by_filter(self):
+        """Filtering by vendor returns the PF with its attached VF."""
+        db_acc = self.fake_accelerator
+        db_pf = self.fake_physical_function
+        db_vf = self.fake_virtual_function
+        acc = objects.Accelerator(context=self.context,
+                                  **db_acc)
+        acc.create(self.context)
+        acc_get = objects.Accelerator.get(self.context, acc.uuid)
+        pf = objects.PhysicalFunction(context=self.context,
+                                      **db_pf)
+
+        pf.accelerator_id = acc_get.id
+        pf.create(self.context)
+        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
+        vf = objects.VirtualFunction(context=self.context,
+                                     **db_vf)
+        vf.accelerator_id = pf_get.accelerator_id
+        vf.create(self.context)
+        vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
+        pf_get.add_vf(vf_get)
+
+        pf_get.save(self.context)
+
+        query = {"vendor": pf['vendor']}
+        pf_get_list = objects.PhysicalFunction.get_by_filter(self.context,
+                                                             query)
+
+        self.assertEqual(len(pf_get_list), 1)
+        self.assertEqual(pf_get_list[0].uuid, pf.uuid)
+        self.assertEqual(objects.PhysicalFunction, type(pf_get_list[0]))
+        self.assertEqual(objects.VirtualFunction,
+                         type(pf_get_list[0].virtual_function_list[0]))
+        self.assertEqual(pf_get_list[0].virtual_function_list[0].uuid,
+                         vf.uuid)
+
+    def test_save(self):
+        """save() persists an updated host field."""
+        db_pf = self.fake_physical_function
+        db_acc = self.fake_accelerator
+
+        acc = objects.Accelerator(context=self.context,
+                                  **db_acc)
+        acc.create(self.context)
+        acc_get = objects.Accelerator.get(self.context, acc.uuid)
+        pf = objects.PhysicalFunction(context=self.context,
+                                      **db_pf)
+        pf.accelerator_id = acc_get.id
+        pf.create(self.context)
+        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
+        pf_get.host = 'test_save'
+
+        pf_get.save(self.context)
+        pf_get_2 = objects.PhysicalFunction.get(self.context, pf.uuid)
+        self.assertEqual(pf_get_2.host, 'test_save')
+
+    def test_destroy(self):
+        """After destroy(), get() raises DeployableNotFound."""
+        db_pf = self.fake_physical_function
+        db_acc = self.fake_accelerator
+        acc = objects.Accelerator(context=self.context,
+                                  **db_acc)
+        acc.create(self.context)
+        acc_get = objects.Accelerator.get(self.context, acc.uuid)
+        pf = objects.PhysicalFunction(context=self.context,
+                                      **db_pf)
+        pf.accelerator_id = acc_get.id
+        pf.create(self.context)
+        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
+        self.assertEqual(db_pf['uuid'], pf_get.uuid)
+        pf_get.destroy(self.context)
+        self.assertRaises(exception.DeployableNotFound,
+                          objects.PhysicalFunction.get, self.context,
+                          pf_get['uuid'])
+
+    def test_add_vf(self):
+        """add_vf() attaches a VF; it survives a save/get round-trip."""
+        db_pf = self.fake_physical_function
+        db_vf = self.fake_virtual_function
+        db_acc = self.fake_accelerator
+        acc = objects.Accelerator(context=self.context,
+                                  **db_acc)
+        acc.create(self.context)
+        acc_get = objects.Accelerator.get(self.context, acc.uuid)
+        pf = objects.PhysicalFunction(context=self.context,
+                                      **db_pf)
+        pf.accelerator_id = acc_get.id
+        pf.create(self.context)
+        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
+
+        vf = objects.VirtualFunction(context=self.context,
+                                     **db_vf)
+        vf.accelerator_id = pf_get.accelerator_id
+        vf.create(self.context)
+        vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
+
+        pf_get.add_vf(vf_get)
+
+        pf_get.save(self.context)
+        pf_get_2 = objects.PhysicalFunction.get(self.context, pf.uuid)
+
+        self.assertEqual(db_vf['uuid'],
+                         pf_get_2.virtual_function_list[0].uuid)
+
+
+class TestPhysicalFunctionObject(test_objects._LocalTest,
+                                 _TestPhysicalFunctionObject):
+    """Run the PF tests locally, plus FK-constraint save failures."""
+
+    def _test_save_objectfield_fk_constraint_fails(self, foreign_key,
+                                                   expected_exception):
+        """Force each per-field save to raise DBReferenceError and check
+        save() surfaces expected_exception.
+        """
+        error = db_exc.DBReferenceError('table', 'constraint', foreign_key,
+                                        'key_table')
+        # Prevent lazy-loading any fields, results in InstanceNotFound
+        pf = fake_physical_function.physical_function_obj(self.context)
+        fields_with_save_methods = [field for field in pf.fields
+                                    if hasattr(pf, '_save_%s' % field)]
+        for field in fields_with_save_methods:
+            # Patch per field; _test is invoked immediately inside the loop.
+            @mock.patch.object(pf, '_save_%s' % field)
+            @mock.patch.object(pf, 'obj_attr_is_set')
+            def _test(mock_is_set, mock_save_field):
+                mock_is_set.return_value = True
+                mock_save_field.side_effect = error
+                pf.obj_reset_changes(fields=[field])
+                pf._changed_fields.add(field)
+                self.assertRaises(expected_exception, pf.save)
+                pf.obj_reset_changes(fields=[field])
+ _test() \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_virtual_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_virtual_function.py
new file mode 100644
index 0000000..fea300f
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_virtual_function.py
@@ -0,0 +1,202 @@
+# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+import mock
+import netaddr
+from oslo_db import exception as db_exc
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+from oslo_context import context
+
+from cyborg import db
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg import tests as test
+from cyborg.tests.unit import fake_physical_function
+from cyborg.tests.unit import fake_virtual_function
+from cyborg.tests.unit import fake_accelerator
+from cyborg.tests.unit.objects import test_objects
+from cyborg.tests.unit.db.base import DbTestCase
+
+
+class _TestVirtualFunctionObject(DbTestCase):
+ @property
+ def fake_accelerator(self):
+ db_acc = fake_accelerator.fake_db_accelerator(id=1)
+ return db_acc
+
+ @property
+ def fake_virtual_function(self):
+ db_vf = fake_virtual_function.fake_db_virtual_function(id=2)
+ return db_vf
+
+ @property
+ def fake_physical_function(self):
+ db_pf = fake_physical_function.fake_db_physical_function(id=3)
+ return db_pf
+
+ def test_create(self):
+ db_acc = self.fake_accelerator
+ db_vf = self.fake_virtual_function
+
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ vf = objects.VirtualFunction(context=self.context,
+ **db_vf)
+ vf.accelerator_id = acc_get.id
+ vf.create(self.context)
+
+ self.assertEqual(db_vf['uuid'], vf.uuid)
+
+ def test_get(self):
+ db_vf = self.fake_virtual_function
+ db_acc = self.fake_accelerator
+
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ vf = objects.VirtualFunction(context=self.context,
+ **db_vf)
+ vf.accelerator_id = acc_get.id
+ vf.create(self.context)
+ vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
+ self.assertEqual(vf_get.uuid, vf.uuid)
+
+ def test_get_by_filter(self):
+ db_acc = self.fake_accelerator
+ db_pf = self.fake_physical_function
+ db_vf = self.fake_virtual_function
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ pf = objects.PhysicalFunction(context=self.context,
+ **db_pf)
+
+ pf.accelerator_id = acc_get.id
+ pf.create(self.context)
+ pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
+ vf = objects.VirtualFunction(context=self.context,
+ **db_vf)
+ vf.accelerator_id = pf_get.accelerator_id
+ vf.create(self.context)
+ vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
+ pf_get.add_vf(vf_get)
+ pf_get.save(self.context)
+
+ query = {"vendor": pf_get['vendor']}
+ vf_get_list = objects.VirtualFunction.get_by_filter(self.context,
+ query)
+
+ self.assertEqual(len(vf_get_list), 1)
+ self.assertEqual(vf_get_list[0].uuid, vf.uuid)
+ self.assertEqual(objects.VirtualFunction, type(vf_get_list[0]))
+ self.assertEqual(1, 1)
+
+ def test_get_by_filter2(self):
+ db_acc = self.fake_accelerator
+
+ db_pf = self.fake_physical_function
+ db_vf = self.fake_virtual_function
+
+ db_pf2 = self.fake_physical_function
+ db_vf2 = self.fake_virtual_function
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ pf = objects.PhysicalFunction(context=self.context,
+ **db_pf)
+
+ pf.accelerator_id = acc_get.id
+ pf.create(self.context)
+ pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
+ pf2 = objects.PhysicalFunction(context=self.context,
+ **db_pf2)
+
+ pf2.accelerator_id = acc_get.id
+ pf2.create(self.context)
+ pf_get2 = objects.PhysicalFunction.get(self.context, pf2.uuid)
+ query = {"uuid": pf2.uuid}
+
+ pf_get_list = objects.PhysicalFunction.get_by_filter(self.context,
+ query)
+ self.assertEqual(1, 1)
+
+ def test_save(self):
+ db_vf = self.fake_virtual_function
+ db_acc = self.fake_accelerator
+
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ vf = objects.VirtualFunction(context=self.context,
+ **db_vf)
+ vf.accelerator_id = acc_get.id
+ vf.create(self.context)
+ vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
+ vf_get.host = 'test_save'
+ vf_get.save(self.context)
+ vf_get_2 = objects.VirtualFunction.get(self.context, vf.uuid)
+ self.assertEqual(vf_get_2.host, 'test_save')
+
+ def test_destroy(self):
+ db_vf = self.fake_virtual_function
+ db_acc = self.fake_accelerator
+
+ acc = objects.Accelerator(context=self.context,
+ **db_acc)
+ acc.create(self.context)
+ acc_get = objects.Accelerator.get(self.context, acc.uuid)
+ vf = objects.VirtualFunction(context=self.context,
+ **db_vf)
+ vf.accelerator_id = acc_get.id
+ vf.create(self.context)
+ vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
+ self.assertEqual(db_vf['uuid'], vf_get.uuid)
+ vf_get.destroy(self.context)
+ self.assertRaises(exception.DeployableNotFound,
+ objects.VirtualFunction.get, self.context,
+ vf_get['uuid'])
+
+
+class TestVirtualFunctionObject(test_objects._LocalTest,
+                                _TestVirtualFunctionObject):
+    """Run the VF tests locally, plus FK-constraint save failures."""
+
+    def _test_save_objectfield_fk_constraint_fails(self, foreign_key,
+                                                   expected_exception):
+        """Force each per-field save to raise DBReferenceError and check
+        save() surfaces expected_exception.
+        """
+        error = db_exc.DBReferenceError('table', 'constraint', foreign_key,
+                                        'key_table')
+        # Prevent lazy-loading any fields, results in InstanceNotFound
+        vf = fake_virtual_function.virtual_function_obj(self.context)
+        fields_with_save_methods = [field for field in vf.fields
+                                    if hasattr(vf, '_save_%s' % field)]
+        for field in fields_with_save_methods:
+            # Patch per field; _test is invoked immediately inside the loop.
+            @mock.patch.object(vf, '_save_%s' % field)
+            @mock.patch.object(vf, 'obj_attr_is_set')
+            def _test(mock_is_set, mock_save_field):
+                mock_is_set.return_value = True
+                mock_save_field.side_effect = error
+                vf.obj_reset_changes(fields=[field])
+                vf._changed_fields.add(field)
+                self.assertRaises(expected_exception, vf.save)
+                vf.obj_reset_changes(fields=[field])
+ _test() \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/utils.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/utils.py
new file mode 100644
index 0000000..99a1e83
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/utils.py
@@ -0,0 +1,41 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Cyborg object test utilities."""
+
+from cyborg import objects
+from cyborg.tests.unit.db import utils as db_utils
+
+
+def get_test_accelerator(ctxt, **kw):
+ """Return an Accelerator object with appropriate attributes.
+
+ NOTE: The object leaves the attributes marked as changed, such
+ that a create() could be used to commit it to the DB.
+ """
+ test_acc = db_utils.get_test_accelerator(**kw)
+ obj_acc = objects.Accelerator(ctxt, **test_acc)
+ return obj_acc
+
+
+def create_test_accelerator(ctxt, **kw):
+ """Create and return a test accelerator object.
+
+ Create an accelerator in the DB and return an Accelerator object with
+ appropriate attributes.
+ """
+ acc = get_test_accelerator(ctxt, **kw)
+ acc.create(ctxt)
+ return acc
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/policy_fixture.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/policy_fixture.py
new file mode 100644
index 0000000..6fad440
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/policy_fixture.py
@@ -0,0 +1,44 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import fixtures
+from oslo_config import cfg
+from oslo_policy import opts as policy_opts
+
+from cyborg.common import policy as cyborg_policy
+
+CONF = cfg.CONF
+
+# Minimal (empty) policy file contents written out by PolicyFixture.
+policy_data = """
+{
+
+}
+"""
+
+
+class PolicyFixture(fixtures.Fixture):
+    """Point oslo.policy at a temporary, empty policy.json for tests."""
+
+    def setUp(self):
+        super(PolicyFixture, self).setUp()
+        # TempDir fixture cleans the directory up automatically.
+        self.policy_dir = self.useFixture(fixtures.TempDir())
+        self.policy_file_name = os.path.join(self.policy_dir.path,
+                                             'policy.json')
+        with open(self.policy_file_name, 'w') as policy_file:
+            policy_file.write(policy_data)
+        policy_opts.set_defaults(CONF)
+        CONF.set_override('policy_file', self.policy_file_name, 'oslo_policy')
+        # Drop any cached enforcer so the override takes effect.
+        cyborg_policy._ENFORCER = None
+        self.addCleanup(cyborg_policy.get_enforcer().clear)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/__init__.py
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/test_placement_client.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/test_placement_client.py
new file mode 100644
index 0000000..131e314
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/test_placement_client.py
@@ -0,0 +1,123 @@
+# Copyright (c) 2018 Lenovo Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from cyborg.tests import base
+import mock
+from cyborg.services import report as placement_client
+from oslo_utils import uuidutils
+from cyborg.common import exception as c_exc
+from keystoneauth1 import exceptions as ks_exc
+from oslo_config import cfg
+
+
+class PlacementAPIClientTestCase(base.DietTestCase):
+ """Test the Placement API client."""
+
+ def setUp(self):
+ super(PlacementAPIClientTestCase, self).setUp()
+ self.mock_load_auth_p = mock.patch(
+ 'keystoneauth1.loading.load_auth_from_conf_options')
+ self.mock_load_auth = self.mock_load_auth_p.start()
+ self.mock_request_p = mock.patch(
+ 'keystoneauth1.session.Session.request')
+ self.mock_request = self.mock_request_p.start()
+ self.client = placement_client.SchedulerReportClient()
+
+ @mock.patch('keystoneauth1.session.Session')
+ @mock.patch('keystoneauth1.loading.load_auth_from_conf_options')
+ def test_constructor(self, load_auth_mock, ks_sess_mock):
+ placement_client.SchedulerReportClient()
+
+ load_auth_mock.assert_called_once_with(cfg.CONF, 'placement')
+ ks_sess_mock.assert_called_once_with(auth=load_auth_mock.return_value,
+ cert=None,
+ timeout=None,
+ verify=True)
+
+ def test_create_resource_provider(self):
+ expected_payload = 'fake_resource_provider'
+ self.client.create_resource_provider(expected_payload)
+ e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
+ expected_url = '/resource_providers'
+ self.mock_request.assert_called_once_with(expected_url, 'POST',
+ endpoint_filter=e_filter,
+ json=expected_payload)
+
+ def test_delete_resource_provider(self):
+ rp_uuid = uuidutils.generate_uuid()
+ self.client.delete_resource_provider(rp_uuid)
+ e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
+ expected_url = '/resource_providers/%s' % rp_uuid
+ self.mock_request.assert_called_once_with(expected_url, 'DELETE',
+ endpoint_filter=e_filter)
+
+ def test_create_inventory(self):
+ expected_payload = 'fake_inventory'
+ rp_uuid = uuidutils.generate_uuid()
+ e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
+ self.client.create_inventory(rp_uuid, expected_payload)
+ expected_url = '/resource_providers/%s/inventories' % rp_uuid
+ self.mock_request.assert_called_once_with(expected_url, 'POST',
+ endpoint_filter=e_filter,
+ json=expected_payload)
+
+ def test_get_inventory(self):
+ rp_uuid = uuidutils.generate_uuid()
+ e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
+ resource_class = 'fake_resource_class'
+ self.client.get_inventory(rp_uuid, resource_class)
+ expected_url = '/resource_providers/%s/inventories/%s' % (
+ rp_uuid, resource_class)
+ self.mock_request.assert_called_once_with(expected_url, 'GET',
+ endpoint_filter=e_filter)
+
+ def _test_get_inventory_not_found(self, details, expected_exception):
+ rp_uuid = uuidutils.generate_uuid()
+ resource_class = 'fake_resource_class'
+ self.mock_request.side_effect = ks_exc.NotFound(details=details)
+ self.assertRaises(expected_exception, self.client.get_inventory,
+ rp_uuid, resource_class)
+
+ def test_get_inventory_not_found_no_resource_provider(self):
+ self._test_get_inventory_not_found(
+ "No resource provider with uuid",
+ c_exc.PlacementResourceProviderNotFound)
+
+ def test_get_inventory_not_found_no_inventory(self):
+ self._test_get_inventory_not_found(
+ "No inventory of class", c_exc.PlacementInventoryNotFound)
+
+ def test_get_inventory_not_found_unknown_cause(self):
+ self._test_get_inventory_not_found("Unknown cause", ks_exc.NotFound)
+
+ def test_update_inventory(self):
+ expected_payload = 'fake_inventory'
+ rp_uuid = uuidutils.generate_uuid()
+ e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
+ resource_class = 'fake_resource_class'
+ self.client.update_inventory(rp_uuid, expected_payload, resource_class)
+ expected_url = '/resource_providers/%s/inventories/%s' % (
+ rp_uuid, resource_class)
+ self.mock_request.assert_called_once_with(expected_url, 'PUT',
+ endpoint_filter=e_filter,
+ json=expected_payload)
+
+ def test_update_inventory_conflict(self):
+ rp_uuid = uuidutils.generate_uuid()
+ expected_payload = 'fake_inventory'
+ resource_class = 'fake_resource_class'
+ self.mock_request.side_effect = ks_exc.Conflict
+ self.assertRaises(c_exc.PlacementInventoryUpdateConflict,
+ self.client.update_inventory, rp_uuid,
+ expected_payload, resource_class) \ No newline at end of file
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/version.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/version.py
new file mode 100644
index 0000000..fd1dcdc
--- /dev/null
+++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/version.py
@@ -0,0 +1,19 @@
+# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pbr.version
+
+
+# Package version resolved by pbr from git tags / package metadata.
+version_info = pbr.version.VersionInfo('cyborg')