diff options
Diffstat (limited to 'cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit')
40 files changed, 2831 insertions, 0 deletions
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/__init__.py new file mode 100644 index 0000000..eeaaced --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/__init__.py @@ -0,0 +1,38 @@ +# Copyright 2017 Huawei Technologies Co.,LTD. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +:mod:`cyborg.tests.unit` -- cyborg unit tests +===================================================== + +.. automodule:: cyborg.tests.unit + :platform: Unix +""" + +import eventlet + +from cyborg import objects + + +eventlet.monkey_patch(os=False) + +# Make sure this is done after eventlet monkey patching otherwise +# the threading.local() store used in oslo_messaging will be initialized to +# threadlocal storage rather than greenthread local. This will cause context +# sets and deletes in that storage to clobber each other. +# Make sure we have all of the objects loaded. We do this +# at module import time, because we may be using mock decorators in our +# tests that run at import time. 
+objects.register_all() diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/base.py new file mode 100644 index 0000000..2041330 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/base.py @@ -0,0 +1,105 @@ +# Copyright 2018 Intel, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +import mock +import os +import subprocess + +import fixtures + +from cyborg.accelerator.drivers.fpga.base import FPGADriver +from cyborg.accelerator.drivers.fpga.intel import sysinfo +from cyborg.tests import base +from cyborg.tests.unit.accelerator.drivers.fpga.intel import prepare_test_data + + +class TestFPGADriver(base.TestCase): + + def setUp(self): + super(TestFPGADriver, self).setUp() + self.syspath = sysinfo.SYS_FPGA + sysinfo.SYS_FPGA = "/sys/class/fpga" + tmp_sys_dir = self.useFixture(fixtures.TempDir()) + prepare_test_data.create_fake_sysfs(tmp_sys_dir.path) + sysinfo.SYS_FPGA = os.path.join( + tmp_sys_dir.path, sysinfo.SYS_FPGA.split("/", 1)[-1]) + + def tearDown(self): + super(TestFPGADriver, self).tearDown() + sysinfo.SYS_FPGA = self.syspath + + def test_create(self): + FPGADriver.create("intel") + self.assertRaises(LookupError, FPGADriver.create, "xilinx") + + def test_discover(self): + d = FPGADriver() + self.assertRaises(NotImplementedError, d.discover) + + def test_program(self): + d = FPGADriver() + self.assertRaises(NotImplementedError, d.program, "path", "image") + + def test_intel_discover(self): + expect = [{'function': 'pf', 'assignable': False, 'pr_num': '1', + 'vendor_id': '0x8086', 'devices': '0000:5e:00.0', + 'regions': [{ + 'function': 'vf', 'assignable': True, + 'product_id': '0xbcc1', + 'name': 'intel-fpga-dev.2', + 'parent_devices': '0000:5e:00.0', + 'path': '%s/intel-fpga-dev.2' % sysinfo.SYS_FPGA, + 'vendor_id': '0x8086', + 'devices': '0000:5e:00.1'}], + 'name': 'intel-fpga-dev.0', + 'parent_devices': '', + 'path': '%s/intel-fpga-dev.0' % sysinfo.SYS_FPGA, + 'product_id': '0xbcc0'}, + {'function': 'pf', 'assignable': True, 'pr_num': '0', + 'vendor_id': '0x8086', 'devices': '0000:be:00.0', + 'name': 'intel-fpga-dev.1', + 'parent_devices': '', + 'path': '%s/intel-fpga-dev.1' % sysinfo.SYS_FPGA, + 'product_id': '0xbcc0'}] + 
expect.sort() + + intel = FPGADriver.create("intel") + fpgas = intel.discover() + fpgas.sort() + self.assertEqual(2, len(fpgas)) + self.assertEqual(fpgas, expect) + + @mock.patch.object(subprocess, 'Popen', autospec=True) + def test_intel_program(self, mock_popen): + + class p(object): + returncode = 0 + + def wait(self): + pass + + b = "0x5e" + d = "0x00" + f = "0x0" + expect_cmd = ['sudo', 'fpgaconf', '-b', b, + '-d', d, '-f', f, '/path/image'] + mock_popen.return_value = p() + intel = FPGADriver.create("intel") + # program VF + intel.program("0000:5e:00.1", "/path/image") + mock_popen.assert_called_with(expect_cmd, stdout=subprocess.PIPE) + + # program PF + intel.program("0000:5e:00.0", "/path/image") + mock_popen.assert_called_with(expect_cmd, stdout=subprocess.PIPE) diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/driver.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/driver.py new file mode 100644 index 0000000..5760ecf --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/driver.py @@ -0,0 +1,93 @@ +# Copyright 2018 Intel, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import mock +import os +import subprocess + +import fixtures + +from cyborg.accelerator.drivers.fpga.intel import sysinfo +from cyborg.accelerator.drivers.fpga.intel.driver import IntelFPGADriver +from cyborg.tests import base +from cyborg.tests.unit.accelerator.drivers.fpga.intel import prepare_test_data + + +class TestIntelFPGADriver(base.TestCase): + + def setUp(self): + super(TestIntelFPGADriver, self).setUp() + self.syspath = sysinfo.SYS_FPGA + sysinfo.SYS_FPGA = "/sys/class/fpga" + tmp_sys_dir = self.useFixture(fixtures.TempDir()) + prepare_test_data.create_fake_sysfs(tmp_sys_dir.path) + sysinfo.SYS_FPGA = os.path.join( + tmp_sys_dir.path, sysinfo.SYS_FPGA.split("/", 1)[-1]) + + def tearDown(self): + super(TestIntelFPGADriver, self).tearDown() + sysinfo.SYS_FPGA = self.syspath + + def test_discover(self): + expect = [{'function': 'pf', 'assignable': False, 'pr_num': '1', + 'vendor_id': '0x8086', 'devices': '0000:5e:00.0', + 'regions': [{ + 'function': 'vf', 'assignable': True, + 'product_id': '0xbcc1', + 'name': 'intel-fpga-dev.2', + 'parent_devices': '0000:5e:00.0', + 'path': '%s/intel-fpga-dev.2' % sysinfo.SYS_FPGA, + 'vendor_id': '0x8086', + 'devices': '0000:5e:00.1'}], + 'name': 'intel-fpga-dev.0', + 'parent_devices': '', + 'path': '%s/intel-fpga-dev.0' % sysinfo.SYS_FPGA, + 'product_id': '0xbcc0'}, + {'function': 'pf', 'assignable': True, 'pr_num': '0', + 'vendor_id': '0x8086', 'devices': '0000:be:00.0', + 'parent_devices': '', + 'name': 'intel-fpga-dev.1', + 'path': '%s/intel-fpga-dev.1' % sysinfo.SYS_FPGA, + 'product_id': '0xbcc0'}] + 
expect.sort() + + intel = IntelFPGADriver() + fpgas = intel.discover() + fpgas.sort() + self.assertEqual(2, len(fpgas)) + self.assertEqual(fpgas, expect) + + @mock.patch.object(subprocess, 'Popen', autospec=True) + def test_intel_program(self, mock_popen): + + class p(object): + returncode = 0 + + def wait(self): + pass + + b = "0x5e" + d = "0x00" + f = "0x0" + expect_cmd = ['sudo', 'fpgaconf', '-b', b, + '-d', d, '-f', f, '/path/image'] + mock_popen.return_value = p() + intel = IntelFPGADriver() + # program VF + intel.program("0000:5e:00.1", "/path/image") + mock_popen.assert_called_with(expect_cmd, stdout=subprocess.PIPE) + + # program PF + intel.program("0000:5e:00.0", "/path/image") + mock_popen.assert_called_with(expect_cmd, stdout=subprocess.PIPE) diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/prepare_test_data.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/prepare_test_data.py new file mode 100644 index 0000000..8955c39 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/fpga/intel/prepare_test_data.py @@ -0,0 +1,295 @@ +# Copyright 2018 Intel, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import argparse +import copy +import glob +import os +import shutil + + +PF0_ADDR = "0000:5e:00.0" +PF1_ADDR = "0000:be:00.0" +VF0_ADDR = "0000:5e:00.1" +FPGA_TREE = { + "dev.0": {"bdf": PF0_ADDR, + "regions": {"dev.2": {"bdf": VF0_ADDR}}}, + "dev.1": {"bdf": PF1_ADDR}} + +SYS_DEVICES = "sys/devices" +SYS_CLASS_FPGA = "sys/class/fpga" + +DEV_PREFIX = "intel-fpga" + +PGFA_DEVICE_COMMON_SUB_DIR = ["power"] + +PGFA_DEVICE_COMMON_CONTENT = { + "broken_parity_status": "0", + "class": "0x120000", + "config": "", + "consistent_dma_mask_bits": "64", + "d3cold_allowed": "1", + "device": "0xbcc0", + "dma_mask_bits": "64", + "driver_override": "(null)", + "enable": "1", + "irq": "16", + "local_cpulist": "0-111", + "local_cpus": "00000000,00000000,00000000,00000000,00000000," + "00000000,00000000,00000000,00000000,00000000," + "0000ffff,ffffffff,ffffffff,ffffffff", + "modalias": "pci:v00008086d0000BCC0sv00000000sd00000000bc12sc00i00", + "msi_bus": "", + "numa_node": "-1", + "resource": [ + "0x00000000c6000000 0x00000000c607ffff 0x000000000014220c", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x00000000c6080000 0x00000000c60fffff 0x000000000014220c", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x00000000c6100000 0x00000000c617ffff 0x000000000014220c", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000"], + "resource0": "", + "resource0_wc": "", + "subsystem_device": "0x0000", + "subsystem_vendor": "0x0000", + "uevent": [ + "DRIVER=intel-fpga-pci", + "PCI_CLASS=120000", + "PCI_ID=8086:BCC0", + 
"PCI_SUBSYS_ID=0000:0000", + "PCI_SLOT_NAME=0000:5e:00.0", + "MODALIAS=pci:v00008086d0000BCC0sv00000000sd00000000bc12sc00i00"], + "vendor": "0x8086"} + +PGFA_DEVICES_SPECIAL_COMMON_CONTENT = { + "dev.0": { + "resource2": "", + "resource2_wc": "", + "sriov_numvfs": "1", + "sriov_totalvfs": "1", + }, + "dev.1": { + "resource": [ + "0x00000000fbc00000 0x00000000fbc7ffff 0x000000000014220c", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x00000000fbc80000 0x00000000fbcfffff 0x000000000014220c", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x00000000fbd00000 0x00000000fbd7ffff 0x000000000014220c", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000"], + "resource2": "", + "resource2_wc": "", + "sriov_numvfs": "0", + "sriov_totalvfs": "1", + "uevent": [ + "DRIVER=intel-fpga-pci", + "PCI_CLASS=120000", + "PCI_ID=8086:BCC0", + "PCI_SUBSYS_ID=0000:0000", + "PCI_SLOT_NAME=0000:be:00.0", + "MODALIAS=pci:v00008086d0000BCC0sv00000000sd00000000bc12sc00i00"], + }, + "dev.2": { + "d3cold_allowed": "0", + "device": "0xbcc1", + "modalias": "pci:v00008086d0000BCC0sv00000000sd00000000bc12sc00i00", + "irq": "0", + "resource": [ + "0x00000000c6100000 0x00000000c617ffff 0x000000000014220c", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 
0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000", + "0x0000000000000000 0x0000000000000000 0x0000000000000000"], + "uevent": [ + "DRIVER=intel-fpga-pci", + "PCI_CLASS=120000", + "PCI_ID=8086:BCC1", + "PCI_SUBSYS_ID=0000:0000", + "PCI_SLOT_NAME=0000:5e:00.1", + "MODALIAS=pci:v00008086d0000BCC1sv00000000sd00000000bc12sc00i00"], + } +} + +PGFA_DEVICE_COMMON_SOFT_LINK = { + "driver": "../../../bus/pci/drivers/intel-fpga-pci", + "iommu": "../../virtual/iommu/dmar8", + "iommu_group": "../../../kernel/iommu_groups/75", + "subsystem": "../../../bus/pci" +} + +PGFA_DEVICES_SPECIAL_SOFT_LINK = { + "dev.0": { + "firmware_node": "../../LNXSYSTM:00/device:00/PNP0A08:18/device:1d4", + }, + "dev.1": { + "firmware_node": "../../LNXSYSTM:00/device:00/PNP0A08:19/device:1d5", + "iommu": "../../virtual/iommu/dmar4", + "iommu_group": "../../../kernel/iommu_groups/76", + }, + "dev.2": { + "iommu": "../../virtual/iommu/dmar9", + "iommu_group": "../../../kernel/iommu_groups/81", + } +} +PGFA_DEVICES_SPECIAL_SOFT_LINK = { + "dev.0": { + "firmware_node": "../../LNXSYSTM:00/device:00/PNP0A08:18/device:1d4", + }, + "dev.1": { + "firmware_node": "../../LNXSYSTM:00/device:00/PNP0A08:19/device:1d5", + "iommu": "../../virtual/iommu/dmar4", + "iommu_group": "../../../kernel/iommu_groups/76", + }, + "dev.2": { + "iommu": "../../virtual/iommu/dmar9", + "iommu_group": "../../../kernel/iommu_groups/81", + } +} + +PGFA_DEVICE_PF_SOFT_LINK = { + "virtfn": lambda k, v: (k + str(int(v.rsplit(".", 1)[-1]) - 1), + "/".join(["..", v])) +} + +PGFA_DEVICE_VF_SOFT_LINK = { + "physfn": lambda k, v: (k, "/".join(["..", v])) +} + + +def gen_fpga_content(path, dev): + content = copy.copy(PGFA_DEVICE_COMMON_CONTENT) + 
content.update(PGFA_DEVICES_SPECIAL_COMMON_CONTENT[dev]) + for k, v in content.items(): + p = os.path.join(path, k) + if not v: + os.mknod(p) + elif type(v) is str: + with open(p, 'a') as f: + f.write(v + "\n") + elif type(v) is list: + with open(p, 'a') as f: + f.writelines([l + "\n" for l in v]) + + +def gen_fpga_sub_dir(path): + for d in PGFA_DEVICE_COMMON_SUB_DIR: + p = os.path.join(path, d) + os.makedirs(p) + + +def gen_fpga_pf_soft_link(path, bdf): + for k, v in PGFA_DEVICE_PF_SOFT_LINK.items(): + if callable(v): + k, v = v(k, bdf) + os.symlink(v, os.path.join(path, k)) + + +def gen_fpga_common_soft_link(path, bdf): + for k, v in PGFA_DEVICE_COMMON_SOFT_LINK.items(): + os.symlink(v, os.path.join(path, k)) + + +def gen_fpga_vf_soft_link(path, bdf): + for k, v in PGFA_DEVICE_VF_SOFT_LINK.items(): + if callable(v): + k, v = v(k, bdf) + os.symlink(v, os.path.join(path, k)) + + +def create_devices_path_and_files(tree, device_path, class_fpga_path, + vf=False, pfinfo={}): + for k, v in tree.items(): + bdf = v["bdf"] + pci_path = "pci" + bdf.rsplit(":", 1)[0] + bdf_path = os.path.join(device_path, pci_path, bdf) + ln = "-".join([DEV_PREFIX, k]) + dev_path = os.path.join(bdf_path, "fpga", ln) + os.makedirs(dev_path) + gen_fpga_content(bdf_path, k) + gen_fpga_sub_dir(bdf_path) + if vf: + gen_fpga_pf_soft_link(pfinfo["path"], bdf) + gen_fpga_vf_soft_link(bdf_path, pfinfo["bdf"]) + pfinfo = {"path": bdf_path, "bdf": bdf} + if "regions" in v: + create_devices_path_and_files( + v["regions"], device_path, class_fpga_path, True, pfinfo) + source = dev_path.split("sys")[-1] + os.symlink("../.." 
+ source, os.path.join(class_fpga_path, ln)) + os.symlink("../../../" + bdf, os.path.join(dev_path, "device")) + + +def create_devices_soft_link(class_fpga_path): + devs = glob.glob1(class_fpga_path, "*") + for dev in devs: + path = os.path.realpath("%s/%s/device" % (class_fpga_path, dev)) + softlinks = copy.copy(PGFA_DEVICE_COMMON_SOFT_LINK) + softlinks.update( + PGFA_DEVICES_SPECIAL_SOFT_LINK[dev.rsplit("-", 1)[-1]]) + for k, v in softlinks.items(): + source = os.path.normpath(os.path.join(path, v)) + if not os.path.exists(source): + os.makedirs(source) + os.symlink(v, os.path.join(path, k)) + + +def create_fake_sysfs(prefix=""): + sys_device = os.path.join(prefix, SYS_DEVICES) + sys_class_fpga = os.path.join(prefix, SYS_CLASS_FPGA) + basedir = os.path.dirname(sys_device) + if os.path.exists(basedir): + shutil.rmtree(basedir, ignore_errors=False, onerror=None) + os.makedirs(sys_class_fpga) + create_devices_path_and_files(FPGA_TREE, sys_device, sys_class_fpga) + create_devices_soft_link(sys_class_fpga) + + +def main(): + create_fake_sysfs() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Generate a fake sysfs for intel FPGA.") + group = parser.add_mutually_exclusive_group() + group.add_argument("-v", "--verbose", action="store_true") + group.add_argument("-q", "--quiet", action="store_true") + parser.add_argument("-p", "--prefix", type=str, + default="/tmp", dest="p", + help='Set the prefix path of the fake sysfs. 
' + 'default "/tmp"') + args = parser.parse_args() + + create_fake_sysfs(args.p) diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/test_generic.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/test_generic.py new file mode 100644 index 0000000..bf066e4 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/modules/test_generic.py @@ -0,0 +1,66 @@ +# Copyright 2017 Lenovo Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Base classes for Generic Driver tests.""" + +import mock + +from cyborg.accelerator.drivers.generic_driver import GenericDriver as generic +from cyborg.conductor.rpcapi import ConductorAPI as conductor_api + +FAKE_CONTEXT = mock.MagicMock() + + +class GenericDriverTest(): + """Class for testing of generic driver + """ + + def setUp(self): + super(GenericDriverTest, self).setUp() + + @mock.patch.object(conductor_api, 'accelerator_create') + def test_create_accelerator(self, mock_acc_create): + mock_acc_create.return_value = self.acc + generic.create_accelerator(context=FAKE_CONTEXT) + + mock_acc_create.assert_called() + + @mock.patch.object(conductor_api, 'accelerator_list_one') + def test_get_accelerator(self, mock_acc_get): + mock_acc_get.return_value = self.acc + generic.get_accelerator(context=FAKE_CONTEXT) + + mock_acc_get.assert_called() + + @mock.patch.object(conductor_api, 'accelerator_list_all') + def test_list_accelerators(self, mock_acc_list): + mock_acc_list.return_value = self.acc + generic.list_accelerators(context=FAKE_CONTEXT) + + mock_acc_list.assert_called() + + @mock.patch.object(conductor_api, 'accelerator_update') + def test_update_accelerator(self, mock_acc_update): + mock_acc_update.return_value = self.acc + generic.update_accelerator(context=FAKE_CONTEXT) + + mock_acc_update.assert_called() + + @mock.patch.object(conductor_api, 'accelerator_delete') + def test_delete_accelerator(self, mock_acc_delete): + mock_acc_delete.return_value = self.acc + generic.delete_accelerator(context=FAKE_CONTEXT) + + mock_acc_delete.assert_called() diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/__init__.py diff --git 
a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/test_nvmf.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/test_nvmf.py new file mode 100644 index 0000000..9f9a5be --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/nvmf/test_nvmf.py @@ -0,0 +1,131 @@ +# Copyright 2017 Huawei Technologies Co.,LTD. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from cyborg.tests import base +import mock +from cyborg.accelerator.drivers.spdk.nvmf.nvmf import NVMFDRIVER +from cyborg.accelerator.drivers.spdk.util import common_fun +from cyborg.accelerator.drivers.spdk.util.pyspdk.nvmf_client import NvmfTgt + + +class TestNVMFDRIVER(base.TestCase): + + def setUp(self,): + super(TestNVMFDRIVER, self).setUp() + self.nvmf_driver = NVMFDRIVER() + + def tearDown(self): + super(TestNVMFDRIVER, self).tearDown() + self.vhost_driver = None + + @mock.patch.object(NVMFDRIVER, 'get_one_accelerator') + def test_discover_accelerator(self, mock_get_one_accelerator): + expect_accelerator = { + 'server': 'nvmf', + 'bdevs': [{"num_blocks": 131072, + "name": "nvme1", + "block_size": 512 + }], + 'subsystems': [{"core": 0, + "nqn": "nqn.2018-01.org.nvmexpress.discovery", + "hosts": [] + }] + } + alive = mock.Mock(return_value=False) + self.nvmf_driver.py.is_alive = alive + check_error = mock.Mock(return_value=False) + common_fun.check_for_setup_error = check_error + self.assertFalse( + mock_get_one_accelerator.called, + "Failed to discover_accelerator if py not alive." 
+ ) + alive = mock.Mock(return_value=True) + self.nvmf_driver.py.is_alive = alive + check_error = mock.Mock(return_value=True) + common_fun.check_for_setup_error = check_error + acce_client = NvmfTgt(self.nvmf_driver.py) + bdevs_fake = [{"num_blocks": 131072, + "name": "nvme1", + "block_size": 512 + }] + bdev_list = mock.Mock(return_value=bdevs_fake) + acce_client.get_bdevs = bdev_list + subsystems_fake = [{"core": 0, + "nqn": "nqn.2018-01.org.nvmexpress.discovery", + "hosts": [] + }] + subsystem_list = mock.Mock(return_value=subsystems_fake) + acce_client.get_nvmf_subsystems = subsystem_list + accelerator_fake = { + 'server': self.nvmf_driver.SERVER, + 'bdevs': acce_client.get_bdevs(), + 'subsystems': acce_client.get_nvmf_subsystems() + } + success_send = mock.Mock(return_value=accelerator_fake) + self.nvmf_driver.get_one_accelerator = success_send + accelerator = self.nvmf_driver.discover_accelerator() + self.assertEqual(accelerator, expect_accelerator) + + def test_accelerator_list(self): + expect_accelerators = [{ + 'server': 'nvmf', + 'bdevs': [{"num_blocks": 131072, + "name": "nvme1", + "block_size": 512 + }], + 'subsystems': + [{"core": 0, + "nqn": "nqn.2018-01.org.nvmexpress.discovery", + "hosts": [] + }] + }, + { + 'server': 'nvnf_tgt', + 'bdevs': [{"num_blocks": 131072, + "name": "nvme1", + "block_size": 512 + }], + 'subsystems': + [{"core": 0, + "nqn": "nqn.2018-01.org.nvmexpress.discovery", + "hosts": [] + }] + } + ] + success_send = mock.Mock(return_value=expect_accelerators) + self.nvmf_driver.get_all_accelerators = success_send + self.assertEqual(self.nvmf_driver.accelerator_list(), + expect_accelerators) + + def test_install_accelerator(self): + pass + + def test_uninstall_accelerator(self): + pass + + def test_update(self): + pass + + def test_attach_instance(self): + pass + + def test_detach_instance(self): + pass + + def test_delete_subsystem(self): + pass + + def test_construct_subsystem(self): + pass diff --git 
a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/test_vhost.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/test_vhost.py new file mode 100644 index 0000000..3c04b8c --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/accelerator/drivers/spdk/vhost/test_vhost.py @@ -0,0 +1,144 @@ +# Copyright 2017 Huawei Technologies Co.,LTD. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from cyborg.tests import base +import mock +from cyborg.accelerator.drivers.spdk.vhost.vhost import VHOSTDRIVER +from cyborg.accelerator.drivers.spdk.util import common_fun +from cyborg.accelerator.drivers.spdk.util.pyspdk.vhost_client import VhostTgt + + +class TestVHOSTDRIVER(base.TestCase): + + def setUp(self): + super(TestVHOSTDRIVER, self).setUp() + self.vhost_driver = VHOSTDRIVER() + + def tearDown(self): + super(TestVHOSTDRIVER, self).tearDown() + self.vhost_driver = None + + @mock.patch.object(VHOSTDRIVER, 'get_one_accelerator') + def test_discover_accelerator(self, mock_get_one_accelerator): + expect_accelerator = { + 'server': 'vhost', + 'bdevs': [{"num_blocks": 131072, + "name": "nvme1", + "block_size": 512 + }], + 'scsi_devices': [], + 'luns': [{"claimed": True, + "name": "Malloc0"}], + 'interfaces': [{"core": 0, + "nqn": "nqn.2018-01.org.nvmexpress.discovery", + "hosts": [] + }] + } + alive = mock.Mock(return_value=True) + self.vhost_driver.py.is_alive = alive + check_error = mock.Mock(return_value=True) + common_fun.check_for_setup_error = check_error + self.assertFalse( + mock_get_one_accelerator.called, + "Failed to discover_accelerator if py not alive." 
+ ) + acce_client = VhostTgt(self.vhost_driver.py) + bdevs_fake = [{"num_blocks": 131072, + "name": "nvme1", + "block_size": 512 + }] + bdev_list = mock.Mock(return_value=bdevs_fake) + acce_client.get_bdevs = bdev_list + scsi_devices_fake = [] + scsi_device_list = mock.Mock(return_value=scsi_devices_fake) + acce_client.get_scsi_devices = scsi_device_list + luns_fake = [{"claimed": True, + "name": "Malloc0"}] + lun_list = mock.Mock(return_value=luns_fake) + acce_client.get_luns = lun_list + interfaces_fake = \ + [{"core": 0, + "nqn": "nqn.2018-01.org.nvmexpress.discovery", + "hosts": [] + }] + interface_list = mock.Mock(return_value=interfaces_fake) + acce_client.get_interfaces = interface_list + accelerator_fake = { + 'server': self.vhost_driver.SERVER, + 'bdevs': acce_client.get_bdevs(), + 'scsi_devices': acce_client.get_scsi_devices(), + 'luns': acce_client.get_luns(), + 'interfaces': acce_client.get_interfaces() + } + success_send = mock.Mock(return_value=accelerator_fake) + self.vhost_driver.get_one_accelerator = success_send + accelerator = self.vhost_driver.discover_accelerator() + self.assertEqual(accelerator, expect_accelerator) + + def test_accelerator_list(self): + expect_accelerators = [{ + 'server': 'vhost', + 'bdevs': [{"num_blocks": 131072, + "name": "nvme1", + "block_size": 512 + }], + 'scsi_devices': [], + 'luns': [{"claimed": True, + "name": "Malloc0"}], + 'interfaces': [{"core": 0, + "nqn": "nqn.2018-01.org.nvmexpress.discovery", + "hosts": [] + }] + }, + { + 'server': 'vhost_tgt', + 'bdevs': [{"num_blocks": 131072, + "name": "nvme1", + "block_size": 512 + }], + 'scsi_devices': [], + 'luns': [{"claimed": True, + "name": "Malloc0"}], + 'interfaces': [{"core": 0, + "nqn": "nqn.2018-01.org.nvmexpress.discovery", + "hosts": [] + }] + } + ] + success_send = mock.Mock(return_value=expect_accelerators) + self.vhost_driver.get_all_accelerators = success_send + self.assertEqual(self.vhost_driver.accelerator_list(), + expect_accelerators) + + def 
test_install_accelerator(self): + pass + + def test_uninstall_accelerator(self): + pass + + def test_update(self): + pass + + def test_attach_instance(self): + pass + + def test_detach_instance(self): + pass + + def test_delete_ip_address(self): + pass + + def test_add_ip_address(self): + pass diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/test_resource_tracker.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/test_resource_tracker.py new file mode 100644 index 0000000..8f277c0 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/agent/test_resource_tracker.py @@ -0,0 +1,91 @@ +# Copyright (c) 2018 Intel. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +"""Cyborg agent resource_tracker test cases.""" + +import os + +import fixtures + +from cyborg.accelerator.drivers.fpga import utils +from cyborg.accelerator.drivers.fpga.intel import sysinfo +from cyborg.agent.resource_tracker import ResourceTracker +from cyborg.conductor import rpcapi as cond_api +from cyborg.conf import CONF +from cyborg.tests import base +from cyborg.tests.unit.accelerator.drivers.fpga.intel import prepare_test_data + + +class TestResourceTracker(base.TestCase): + """Test Agent ResourceTracker """ + + def setUp(self): + super(TestResourceTracker, self).setUp() + self.syspath = sysinfo.SYS_FPGA + sysinfo.SYS_FPGA = "/sys/class/fpga" + tmp_sys_dir = self.useFixture(fixtures.TempDir()) + prepare_test_data.create_fake_sysfs(tmp_sys_dir.path) + sysinfo.SYS_FPGA = os.path.join( + tmp_sys_dir.path, sysinfo.SYS_FPGA.split("/", 1)[-1]) + utils.SYS_FPGA_PATH = sysinfo.SYS_FPGA + self.host = CONF.host + self.cond_api = cond_api.ConductorAPI() + self.rt = ResourceTracker(self.host, self.cond_api) + + def tearDown(self): + super(TestResourceTracker, self).tearDown() + sysinfo.SYS_FPGA = self.syspath + utils.SYS_FPGA_PATH = self.syspath + + def test_update_usage(self): + """Update the resource usage and stats after a change in an + instance + """ + # FIXME(Shaohe Feng) need add testcase. How to check the fpgas + # has stored into DB by conductor correctly? 
+ pass + + def test_get_fpga_devices(self): + expect = { + '0000:5e:00.0': { + 'function': 'pf', 'assignable': False, 'pr_num': '1', + 'name': 'intel-fpga-dev.0', 'vendor_id': '0x8086', + 'devices': '0000:5e:00.0', + 'regions': [{ + 'function': 'vf', 'assignable': True, + 'name': 'intel-fpga-dev.2', 'vendor_id': '0x8086', + 'devices': '0000:5e:00.1', + 'parent_devices': '0000:5e:00.0', + 'path': '%s/intel-fpga-dev.2' % sysinfo.SYS_FPGA, + 'product_id': '0xbcc1'}], + 'parent_devices': '', + 'path': '%s/intel-fpga-dev.0' % sysinfo.SYS_FPGA, + 'product_id': '0xbcc0'}, + '0000:5e:00.1': { + 'function': 'vf', 'assignable': True, + 'name': 'intel-fpga-dev.2', 'vendor_id': '0x8086', + 'devices': '0000:5e:00.1', + 'parent_devices': '0000:5e:00.0', + 'path': '%s/intel-fpga-dev.2' % sysinfo.SYS_FPGA, + 'product_id': '0xbcc1'}, + '0000:be:00.0': { + 'function': 'pf', 'assignable': True, 'pr_num': '0', + 'name': 'intel-fpga-dev.1', 'vendor_id': '0x8086', + 'devices': '0000:be:00.0', 'parent_devices': '', + 'path': '%s/intel-fpga-dev.1' % sysinfo.SYS_FPGA, + 'product_id': '0xbcc0'}} + fpgas = self.rt._get_fpga_devices() + self.assertDictEqual(expect, fpgas) diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/base.py new file mode 100644 index 0000000..75578fd --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/base.py @@ -0,0 +1,214 @@ +# Copyright 2017 Huawei Technologies Co.,LTD. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Base classes for API tests.""" + +from oslo_config import cfg +import pecan +import pecan.testing + +from cyborg.tests.unit.db import base + + +cfg.CONF.import_group('keystone_authtoken', 'keystonemiddleware.auth_token') + + +class BaseApiTest(base.DbTestCase): + """Pecan controller functional testing class. + + Used for functional tests of Pecan controllers where you need to + test your literal application and its integration with the + framework. + """ + + PATH_PREFIX = '' + + def setUp(self): + super(BaseApiTest, self).setUp() + cfg.CONF.set_override("auth_version", "v3", + group='keystone_authtoken') + cfg.CONF.set_override("admin_user", "admin", + group='keystone_authtoken') + self.app = self._make_app() + + def reset_pecan(): + pecan.set_config({}, overwrite=True) + + self.addCleanup(reset_pecan) + + def _make_app(self): + # Determine where we are so we can set up paths in the config + root_dir = self.get_path() + + self.app_config = { + 'app': { + 'root': 'cyborg.api.controllers.root.RootController', + 'modules': ['cyborg.api'], + 'static_root': '%s/public' % root_dir, + 'template_path': '%s/api/templates' % root_dir, + 'acl_public_routes': ['/', '/v1/.*'], + }, + } + return pecan.testing.load_test_app(self.app_config) + + def _request_json(self, path, params, expect_errors=False, headers=None, + method="post", extra_environ=None, status=None): + """Sends simulated HTTP request to Pecan test app. 
+ + :param path: url path of target service + :param params: content for wsgi.input of request + :param expect_errors: Boolean value; whether an error is expected based + on request + :param headers: a dictionary of headers to send along with the request + :param method: Request method type. Appropriate method function call + should be used rather than passing attribute in. + :param extra_environ: a dictionary of environ variables to send along + with the request + :param status: expected status code of response + """ + response = getattr(self.app, "%s_json" % method)( + str(path), + params=params, + headers=headers, + status=status, + extra_environ=extra_environ, + expect_errors=expect_errors + ) + return response + + def post_json(self, path, params, expect_errors=False, headers=None, + extra_environ=None, status=None): + """Sends simulated HTTP POST request to Pecan test app. + + :param path: url path of target service + :param params: content for wsgi.input of request + :param expect_errors: Boolean value; whether an error is expected based + on request + :param headers: a dictionary of headers to send along with the request + :param extra_environ: a dictionary of environ variables to send along + with the request + :param status: expected status code of response + """ + full_path = self.PATH_PREFIX + path + return self._request_json(path=full_path, params=params, + expect_errors=expect_errors, + headers=headers, extra_environ=extra_environ, + status=status, method="post") + + def gen_headers(self, context, **kw): + """Generate a header for a simulated HTTP request to Pecan test app. + + :param context: context that store the client user information. + :param kw: key word aguments, used to overwrite the context attribute. + + note: "is_public_api" is not in headers, it should be in environ + variables to send along with the request. We can pass it by + extra_environ when we call delete, get_json or other method request. 
+ """ + ct = context.to_dict() + ct.update(kw) + headers = { + 'X-User-Name': ct.get("user_name") or "user", + 'X-User-Id': + ct.get("user") or "1d6d686bc2c949ddb685ffb4682e0047", + 'X-Project-Name': ct.get("project_name") or "project", + 'X-Project-Id': + ct.get("tenant") or "86f64f561b6d4f479655384572727f70", + 'X-User-Domain-Id': + ct.get("domain_id") or "bd5eeb7d0fb046daaf694b36f4df5518", + 'X-User-Domain-Name': ct.get("domain_name") or "no_domain", + 'X-Auth-Token': + ct.get("auth_token") or "b9764005b8c145bf972634fb16a826e8", + 'X-Roles': ct.get("roles") or "cyborg" + } + + return headers + + def get_json(self, path, expect_errors=False, headers=None, + extra_environ=None, q=None, **params): + """Sends simulated HTTP GET request to Pecan test app. + + :param path: url path of target service + :param expect_errors: Boolean value; whether an error is expected based + on request + :param headers: a dictionary of headers to send along with the request + :param extra_environ: a dictionary of environ variables to send along + with the request + :param q: list of queries consisting of: field, value, op, and type + keys + :param path_prefix: prefix of the url path + :param params: content for wsgi.input of request + """ + full_path = self.PATH_PREFIX + path + q = q or [] + query_params = { + 'q.field': [], + 'q.value': [], + 'q.op': [], + } + for query in q: + for name in ['field', 'op', 'value']: + query_params['q.%s' % name].append(query.get(name, '')) + all_params = {} + all_params.update(params) + if q: + all_params.update(query_params) + response = self.app.get(full_path, + params=all_params, + headers=headers, + extra_environ=extra_environ, + expect_errors=expect_errors) + if not expect_errors: + response = response.json + return response + + def patch_json(self, path, params, expect_errors=False, headers=None, + extra_environ=None, status=None): + """Sends simulated HTTP PATCH request to Pecan test app. 
+ + :param path: url path of target service + :param params: content for wsgi.input of request + :param expect_errors: Boolean value; whether an error is expected based + on request + :param headers: a dictionary of headers to send along with the request + :param extra_environ: a dictionary of environ variables to send along + with the request + :param status: expected status code of response + """ + full_path = self.PATH_PREFIX + path + return self._request_json(path=full_path, params=params, + expect_errors=expect_errors, + headers=headers, extra_environ=extra_environ, + status=status, method="put") + + def delete(self, path, expect_errors=False, headers=None, + extra_environ=None, status=None): + """Sends simulated HTTP DELETE request to Pecan test app. + + :param path: url path of target service + :param expect_errors: Boolean value; whether an error is expected based + on request + :param headers: a dictionary of headers to send along with the request + :param extra_environ: a dictionary of environ variables to send along + with the request + :param status: expected status code of response + """ + full_path = self.PATH_PREFIX + path + response = self.app.delete(full_path, + headers=headers, + status=status, + extra_environ=extra_environ, + expect_errors=expect_errors) + return response diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ 
b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/base.py new file mode 100644 index 0000000..c16eaee --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/base.py @@ -0,0 +1,21 @@ +# Copyright 2017 Huawei Technologies Co.,LTD. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from cyborg.tests.unit.api import base + + +class APITestV1(base.BaseApiTest): + + PATH_PREFIX = '/v1' diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/test_accelerators.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/test_accelerators.py new file mode 100644 index 0000000..9f606a4 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/api/controllers/v1/test_accelerators.py @@ -0,0 +1,174 @@ +# Copyright 2017 Huawei Technologies Co.,LTD. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import datetime +import mock +from oslo_utils import timeutils +from six.moves import http_client + +from cyborg.conductor import rpcapi +from cyborg.tests.unit.api.controllers.v1 import base as v1_test +from cyborg.tests.unit.db import utils as db_utils +from cyborg.tests.unit.objects import utils as obj_utils + +def gen_post_body(**kw): + return db_utils.get_test_accelerator(**kw) + + +def _rpcapi_accelerator_create(context, obj_acc): + """Fake used to mock out the conductor RPCAPI's accelerator_create method. + + Performs creation of the accelerator object and returns the created + accelerator as-per the real method. 
+ """ + obj_acc.create(context) + return obj_acc + + + +class TestPost(v1_test.APITestV1): + + ACCELERATOR_UUID = '10efe63d-dfea-4a37-ad94-4116fba50981' + + def setUp(self): + super(TestPost, self).setUp() + self.headers = self.gen_headers(self.context) + + p = mock.patch.object(rpcapi.ConductorAPI, 'accelerator_create') + self.mock_create = p.start() + self.mock_create.side_effect = _rpcapi_accelerator_create + self.addCleanup(p.stop) + + @mock.patch('oslo_utils.uuidutils.generate_uuid') + def test_post(self, mock_uuid): + mock_uuid.return_value = self.ACCELERATOR_UUID + + body = gen_post_body(name='post_accelerator') + response = self.post_json('/accelerators', body, headers=self.headers) + self.assertEqual(http_client.CREATED, response.status_int) + response = response.json + self.assertEqual(self.ACCELERATOR_UUID, response['uuid']) + self.assertEqual(body['name'], response['name']) + self.mock_create.assert_called_once_with(mock.ANY, mock.ANY, mock.ANY) + + +class TestList(v1_test.APITestV1): + + def setUp(self): + super(TestList, self).setUp() + self.accs = [] + for i in range(3): + acc = obj_utils.create_test_accelerator(self.context) + self.accs.append(acc) + self.acc = self.accs[0] + self.context.tenant = self.acc.project_id + self.headers = self.gen_headers(self.context) + + def test_get_one(self): + data = self.get_json('/accelerators/%s' % self.acc.uuid, + headers=self.headers) + self.assertEqual(self.acc.uuid, data['uuid']) + self.assertIn('acc_capability', data) + self.assertIn('acc_type', data) + self.assertIn('created_at', data) + self.assertIn('description', data) + self.assertIn('device_type', data) + self.assertIn('links', data) + self.assertIn('name', data) + self.assertIn('product_id', data) + self.assertIn('project_id', data) + self.assertIn('remotable', data) + self.assertIn('updated_at', data) + self.assertIn('user_id', data) + self.assertIn('vendor_id', data) + + def test_get_all(self): + data = self.get_json('/accelerators', 
headers=self.headers) + self.assertEqual(3, len(data['accelerators'])) + data_uuids = [d['uuid'] for d in data['accelerators']] + acc_uuids = [acc.uuid for acc in self.accs] + self.assertItemsEqual(acc_uuids, data_uuids) + + +def _rpcapi_accelerator_update(context, obj_acc): + """Fake used to mock out the conductor RPCAPI's accelerator_update method. + + Performs update of the accelerator object and returns the updated + accelerator as-per the real method. + """ + obj_acc.save(context) + return obj_acc + + +class TestPut(v1_test.APITestV1): + + def setUp(self): + super(TestPut, self).setUp() + self.acc = obj_utils.create_test_accelerator(self.context) + self.context.tenant = self.acc.project_id + self.headers = self.gen_headers(self.context) + + p = mock.patch.object(rpcapi.ConductorAPI, 'accelerator_update') + self.mock_update = p.start() + self.mock_update.side_effect = _rpcapi_accelerator_update + self.addCleanup(p.stop) + + @mock.patch.object(timeutils, 'utcnow') + def test_put(self, mock_utcnow): + test_time = datetime.datetime(2012, 12, 12, 12, 12) + mock_utcnow.return_value = test_time + + description = 'new-description' + response = self.patch_json('/accelerators/%s' % self.acc.uuid, + [{'path': '/description', + 'value': description, + 'op': 'replace'}], + headers=self.headers) + self.assertEqual(http_client.OK, response.status_code) + data = self.get_json('/accelerators/%s' % self.acc.uuid, + headers=self.headers) + self.assertEqual(description, data['description']) + return_updated_at = timeutils.parse_isotime( + data['updated_at']).replace(tzinfo=None) + self.assertEqual(test_time, return_updated_at) + self.mock_update.assert_called_once_with(mock.ANY, mock.ANY) + + +def _rpcapi_accelerator_delete(context, obj_acc): + """Fake used to mock out the conductor RPCAPI's accelerator_delete method. + + Performs deletion of the accelerator object as-per the real method. 
+ """ + obj_acc.destroy(context) + + +class TestDelete(v1_test.APITestV1): + + def setUp(self): + super(TestDelete, self).setUp() + self.acc = obj_utils.create_test_accelerator(self.context) + self.context.tenant = self.acc.project_id + self.headers = self.gen_headers(self.context) + + p = mock.patch.object(rpcapi.ConductorAPI, 'accelerator_delete') + self.mock_delete = p.start() + self.mock_delete.side_effect = _rpcapi_accelerator_delete + self.addCleanup(p.stop) + + def test_delete(self): + response = self.delete('/accelerators/%s' % self.acc.uuid, + headers=self.headers) + self.assertEqual(http_client.NO_CONTENT, response.status_code) + self.mock_delete.assert_called_once_with(mock.ANY, mock.ANY) diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/base.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/base.py new file mode 100644 index 0000000..1d40ce0 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/base.py @@ -0,0 +1,71 @@ +# Copyright 2017 Huawei Technologies Co.,LTD. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Cyborg DB test base class.""" + +import fixtures +from oslo_config import cfg +from oslo_db.sqlalchemy import enginefacade + +from cyborg.db import api as dbapi +from cyborg.db.sqlalchemy import migration +from cyborg.db.sqlalchemy import models +from cyborg.tests import base + + +CONF = cfg.CONF +_DB_CACHE = None + + +class Database(fixtures.Fixture): + + def __init__(self, engine, db_migrate, sql_connection): + self.sql_connection = sql_connection + + self.engine = engine + self.engine.dispose() + conn = self.engine.connect() + self.setup_sqlite(db_migrate) + + self._DB = ''.join(line for line in conn.connection.iterdump()) + self.engine.dispose() + + def setup_sqlite(self, db_migrate): + if db_migrate.version(): + return + models.Base.metadata.create_all(self.engine) + db_migrate.stamp('head') + + def setUp(self): + super(Database, self).setUp() + + conn = self.engine.connect() + conn.connection.executescript(self._DB) + self.addCleanup(self.engine.dispose) + + +class DbTestCase(base.TestCase): + + def setUp(self): + super(DbTestCase, self).setUp() + + self.dbapi = dbapi.get_instance() + + global _DB_CACHE + if not _DB_CACHE: + engine = enginefacade.get_legacy_facade().get_engine() + _DB_CACHE = Database(engine, migration, + sql_connection=CONF.database.connection) + self.useFixture(_DB_CACHE) diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/utils.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/utils.py new file mode 100644 index 0000000..8290af1 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/db/utils.py @@ -0,0 +1,31 @@ +# Copyright 2017 Huawei Technologies Co.,LTD. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Cyborg db test utilities.""" + + +def get_test_accelerator(**kw): + return { + 'name': kw.get('name', 'name'), + 'description': kw.get('description', 'description'), + 'device_type': kw.get('device_type', 'device_type'), + 'acc_type': kw.get('acc_type', 'acc_type'), + 'acc_capability': kw.get('acc_capability', 'acc_capability'), + 'vendor_id': kw.get('vendor_id', 'vendor_id'), + 'product_id': kw.get('product_id', 'product_id'), + 'remotable': kw.get('remotable', 1), + 'project_id': kw.get('project_id', 'b492a6fb12964ae3bd291ce585107d48'), + 'user_id': kw.get('user_id', '7009409e21614d1db1ef7a8c5ee101d8'), + } diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_accelerator.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_accelerator.py new file mode 100644 index 0000000..da592e9 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_accelerator.py @@ -0,0 +1,66 @@ +# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_serialization import jsonutils
+from oslo_utils import uuidutils
+
+from cyborg import objects
+from cyborg.objects import fields
+
+
def fake_db_accelerator(**updates):
    """Build a fake accelerator DB record as a plain dict.

    Any keyword argument overrides the corresponding default entry, so
    tests can tweak single fields without restating the whole record.
    """
    db_accelerator = {
        'id': 1,
        'deleted': False,
        'uuid': uuidutils.generate_uuid(),
        'name': 'fake-name',
        'description': 'fake-desc',
        'project_id': 'fake-pid',
        'user_id': 'fake-uid',
        'device_type': 'fake-dtype',
        'acc_type': 'fake-acc_type',
        'acc_capability': 'fake-cap',
        'vendor_id': 'fake-vid',
        'product_id': 'fake-pid',
        'remotable': 0
    }

    # Any Accelerator object field not listed above still needs a value:
    # use None when the field is nullable, else its declared default, and
    # fail loudly so the fixture is updated when new fields appear.
    for field_name, field in objects.Accelerator.fields.items():
        if field_name in db_accelerator:
            continue
        if field.nullable:
            db_accelerator[field_name] = None
        elif field.default != fields.UnspecifiedDefault:
            db_accelerator[field_name] = field.default
        else:
            raise Exception('fake_db_accelerator needs help with %s'
                            % field_name)

    db_accelerator.update(updates)

    return db_accelerator
+
+
def fake_accelerator_obj(context, obj_accelerator_class=None, **updates):
    """Create a fake Accelerator versioned object (not persisted).

    :param context: request context passed to ``_from_db_object``.
    :param obj_accelerator_class: object class to build; defaults to
        ``objects.Accelerator``.
    :param updates: overrides for the fake DB record; the special key
        ``expected_attrs`` is popped and forwarded to ``_from_db_object``.
    :returns: an accelerator object with its change tracking reset.
    """
    if obj_accelerator_class is None:
        obj_accelerator_class = objects.Accelerator
    expected_attrs = updates.pop('expected_attrs', None)
    # FIX: the original body referenced undefined names
    # (obj_instance_class / fake_db_instance), raising NameError on every
    # call; use the accelerator class and fixture defined in this module.
    acc = obj_accelerator_class._from_db_object(
        context,
        obj_accelerator_class(),
        fake_db_accelerator(**updates),
        expected_attrs=expected_attrs)
    acc.obj_reset_changes()
    return acc
\ No newline at end of file diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_deployable.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_deployable.py new file mode 100644 index 0000000..0f1c8c8 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_deployable.py @@ -0,0 +1,70 @@ +# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_serialization import jsonutils
+from oslo_utils import uuidutils
+
+from cyborg import objects
+from cyborg.objects import fields
+
+
def fake_db_deployable(**updates):
    """Build a fake deployable DB record as a plain dict.

    The uuid doubles as root_uuid so the record looks like a root (PF)
    deployable; keyword arguments override individual entries.
    """
    root_uuid = uuidutils.generate_uuid()
    db_deployable = {
        'id': 1,
        'deleted': False,
        'uuid': root_uuid,
        'name': 'dp_name',
        'parent_uuid': None,
        'root_uuid': root_uuid,
        'pcie_address': '00:7f:0b.2',
        'host': 'host_name',
        'board': 'KU115',
        'vendor': 'Xilinx',
        'version': '1.0',
        'type': 'pf',
        'assignable': True,
        'instance_uuid': None,
        'availability': 'Available',
        'accelerator_id': 1
    }

    # Backfill every remaining Deployable field: None when nullable,
    # otherwise its declared default; unknown fields are a hard error so
    # the fixture keeps pace with the object definition.
    for field_name, field in objects.Deployable.fields.items():
        if field_name in db_deployable:
            continue
        if field.nullable:
            db_deployable[field_name] = None
        elif field.default != fields.UnspecifiedDefault:
            db_deployable[field_name] = field.default
        else:
            raise Exception('fake_db_deployable needs help with %s'
                            % field_name)

    db_deployable.update(updates)

    return db_deployable
+
+
def fake_deployable_obj(context, obj_dpl_class=None, **updates):
    """Return a Deployable object hydrated from a fake DB record."""
    if obj_dpl_class is None:
        obj_dpl_class = objects.Deployable
    expected_attrs = updates.pop('expected_attrs', None)
    db_rec = fake_db_deployable(**updates)
    deploy = obj_dpl_class._from_db_object(
        context, obj_dpl_class(), db_rec, expected_attrs=expected_attrs)
    deploy.obj_reset_changes()
    return deploy
\ No newline at end of file diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_physical_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_physical_function.py new file mode 100644 index 0000000..4a2bbb7 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_physical_function.py @@ -0,0 +1,72 @@ +# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_serialization import jsonutils
+from oslo_utils import uuidutils
+
+from cyborg import objects
+from cyborg.objects import fields
+from cyborg.objects import physical_function
+
+
def fake_db_physical_function(**updates):
    """Build a fake physical-function (PF) DB record as a plain dict.

    The record is its own root (uuid == root_uuid, type 'pf'); keyword
    arguments override individual entries.
    """
    root_uuid = uuidutils.generate_uuid()
    db_physical_function = {
        'id': 1,
        'deleted': False,
        'uuid': root_uuid,
        'name': 'dp_name',
        'parent_uuid': None,
        'root_uuid': root_uuid,
        'pcie_address': '00:7f:0b.2',
        'host': 'host_name',
        'board': 'KU115',
        'vendor': 'Xilinx',
        'version': '1.0',
        'type': 'pf',
        'assignable': True,
        'instance_uuid': None,
        'availability': 'Available',
        'accelerator_id': 1
    }

    # Backfill every remaining PhysicalFunction field: None when nullable,
    # otherwise its declared default; anything else is a hard error so the
    # fixture is updated alongside the object definition.
    for field_name, field in physical_function.PhysicalFunction.fields.items():
        if field_name in db_physical_function:
            continue
        if field.nullable:
            db_physical_function[field_name] = None
        elif field.default != fields.UnspecifiedDefault:
            db_physical_function[field_name] = field.default
        else:
            raise Exception('fake_db_physical_function needs help with %s'
                            % field_name)

    db_physical_function.update(updates)

    return db_physical_function
+
+
def fake_physical_function_obj(context, obj_pf_class=None, **updates):
    """Return a PhysicalFunction object hydrated from a fake DB record.

    :param context: request context passed to ``_from_db_object``.
    :param obj_pf_class: object class to build; defaults to
        ``physical_function.PhysicalFunction``.
    :param updates: overrides for the fake DB record; the special key
        ``expected_attrs`` is popped and forwarded to ``_from_db_object``.
    :returns: a physical-function object with its change tracking reset.
    """
    if obj_pf_class is None:
        # FIX: the default used to be objects.VirtualFunction — a
        # copy-paste error from fake_virtual_function.py; this module
        # builds PhysicalFunction records (see fake_db_physical_function).
        obj_pf_class = physical_function.PhysicalFunction
    expected_attrs = updates.pop('expected_attrs', None)
    pf = obj_pf_class._from_db_object(context,
                                      obj_pf_class(),
                                      fake_db_physical_function(**updates),
                                      expected_attrs=expected_attrs)
    pf.obj_reset_changes()
    return pf
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_virtual_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_virtual_function.py new file mode 100644 index 0000000..8184b0f --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/fake_virtual_function.py @@ -0,0 +1,72 @@ +# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_serialization import jsonutils
+from oslo_utils import uuidutils
+
+from cyborg import objects
+from cyborg.objects import fields
+from cyborg.objects import virtual_function
+
+
def fake_db_virtual_function(**updates):
    """Build a fake virtual-function DB record as a plain dict.

    Fields not listed explicitly are filled from the VirtualFunction
    object definition (None when nullable, otherwise the declared
    default); ``updates`` is applied last and overrides everything.
    """
    vf_uuid = uuidutils.generate_uuid()
    db_virtual_function = {
        'id': 1,
        'deleted': False,
        'uuid': vf_uuid,
        'name': 'dp_name',
        'parent_uuid': None,
        'root_uuid': vf_uuid,
        'pcie_address': '00:7f:bb.2',
        'host': 'host_name',
        'board': 'KU115',
        'vendor': 'Xilinx',
        'version': '1.0',
        'type': 'vf',
        'assignable': True,
        'instance_uuid': None,
        'availability': 'Available',
        'accelerator_id': 1,
    }

    # Backfill any object field not covered by the literal above.
    for name, field in virtual_function.VirtualFunction.fields.items():
        if name not in db_virtual_function:
            if field.nullable:
                db_virtual_function[name] = None
            elif field.default != fields.UnspecifiedDefault:
                db_virtual_function[name] = field.default
            else:
                raise Exception('fake_db_virtual_function needs help with %s'
                                % name)

    if updates:
        db_virtual_function.update(updates)

    return db_virtual_function
+
+
def fake_virtual_function_obj(context, obj_vf_class=None, **updates):
    """Return a VirtualFunction object hydrated from fake DB data.

    ``expected_attrs`` may be passed in ``updates`` and is forwarded to
    ``_from_db_object``; all other keys override the fake DB record.
    """
    cls = objects.VirtualFunction if obj_vf_class is None else obj_vf_class
    expected_attrs = updates.pop('expected_attrs', None)
    db_record = fake_db_virtual_function(**updates)
    vf = cls._from_db_object(context,
                             cls(),
                             db_record,
                             expected_attrs=expected_attrs)
    # Hydration should leave the object with no pending changes.
    vf.obj_reset_changes()
    return vf
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_accelerator.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_accelerator.py new file mode 100644 index 0000000..1141d8c --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_accelerator.py @@ -0,0 +1,104 @@ +# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+import mock
+import netaddr
+from oslo_db import exception as db_exc
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+from oslo_context import context
+
+from cyborg import db
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg import tests as test
+from cyborg.tests.unit import fake_accelerator
+from cyborg.tests.unit.objects import test_objects
+from cyborg.tests.unit.db.base import DbTestCase
+
+
class _TestAcceleratorObject(DbTestCase):
    """Tests for objects.Accelerator with the DB API layer mocked out."""

    @property
    def fake_accelerator(self):
        # A fresh fake DB record on every access; id is pinned to 2 so it
        # can be compared against the object after create().
        db_acc = fake_accelerator.fake_db_accelerator(id=2)
        return db_acc

    @mock.patch.object(db.api.Connection, 'accelerator_create')
    def test_create(self, mock_create):
        """create() populates the object from the DB layer's return."""
        mock_create.return_value = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **mock_create.return_value)
        acc.create(self.context)

        self.assertEqual(self.fake_accelerator['id'], acc.id)

    @mock.patch.object(db.api.Connection, 'accelerator_get')
    def test_get(self, mock_get):
        """get() by uuid returns an object matching the created one."""
        mock_get.return_value = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **mock_get.return_value)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc['uuid'])
        self.assertEqual(acc_get.uuid, acc.uuid)

    @mock.patch.object(db.api.Connection, 'accelerator_update')
    def test_save(self, mock_save):
        """save() persists an attribute change visible to a later get()."""
        mock_save.return_value = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **mock_save.return_value)
        acc.create(self.context)
        acc.name = 'test_save'
        acc.save(self.context)
        acc_get = objects.Accelerator.get(self.context, acc['uuid'])
        self.assertEqual(acc_get.name, 'test_save')

    @mock.patch.object(db.api.Connection, 'accelerator_delete')
    def test_destroy(self, mock_destroy):
        """destroy() removes the record; a later get() raises NotFound."""
        mock_destroy.return_value = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **mock_destroy.return_value)
        acc.create(self.context)
        self.assertEqual(self.fake_accelerator['id'], acc.id)
        acc.destroy(self.context)
        self.assertRaises(exception.AcceleratorNotFound,
                          objects.Accelerator.get, self.context,
                          acc['uuid'])
+
+
class TestAcceleratorObject(test_objects._LocalTest,
                            _TestAcceleratorObject):
    """Accelerator object tests run in the in-process (_LocalTest) mode."""

    def _test_save_objectfield_fk_constraint_fails(self, foreign_key,
                                                   expected_exception):
        """Verify save() raises expected_exception when a per-field
        ``_save_<field>`` hook hits a foreign-key DBReferenceError.
        """
        error = db_exc.DBReferenceError('table', 'constraint', foreign_key,
                                        'key_table')
        # Prevent lazy-loading any fields, results in InstanceNotFound
        accelerator = fake_accelerator.fake_accelerator_obj(self.context)
        # Only fields that have a dedicated _save_<field> method participate.
        fields_with_save_methods = [field for field in accelerator.fields
                                    if hasattr(accelerator,
                                               '_save_%s' % field)]
        for field in fields_with_save_methods:
            # NOTE(review): the patch decorators are re-applied for each
            # freshly defined _test closure on every loop iteration.
            @mock.patch.object(accelerator, '_save_%s' % field)
            @mock.patch.object(accelerator, 'obj_attr_is_set')
            def _test(mock_is_set, mock_save_field):
                mock_is_set.return_value = True
                mock_save_field.side_effect = error
                # Mark exactly this field dirty so save() calls its hook.
                accelerator.obj_reset_changes(fields=[field])
                accelerator._changed_fields.add(field)
                self.assertRaises(expected_exception, accelerator.save)
                accelerator.obj_reset_changes(fields=[field])
            _test()
\ No newline at end of file diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_deployable.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_deployable.py new file mode 100644 index 0000000..fe3c6fc --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_deployable.py @@ -0,0 +1,151 @@ +# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+import mock
+import netaddr
+from oslo_db import exception as db_exc
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+from oslo_context import context
+
+from cyborg import db
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg import tests as test
+from cyborg.tests.unit import fake_accelerator
+from cyborg.tests.unit import fake_deployable
+from cyborg.tests.unit.objects import test_objects
+from cyborg.tests.unit.db.base import DbTestCase
+
+
class _TestDeployableObject(DbTestCase):
    """Tests for objects.Deployable against the test database."""

    @property
    def fake_deployable(self):
        # Fresh fake deployable DB record per access (id pinned to 2).
        db_deploy = fake_deployable.fake_db_deployable(id=2)
        return db_deploy

    @property
    def fake_accelerator(self):
        # Bug fix: this called fake_db_acceleraotr (typo), which raised
        # AttributeError; the helper is named fake_db_accelerator (as used
        # by the other test modules).
        db_acc = fake_accelerator.fake_db_accelerator(id=2)
        return db_acc

    def test_create(self):
        """A created deployable keeps the uuid of its source record."""
        # Bug fix: this method previously declared a ``mock_create``
        # parameter without any mock.patch decorator to supply it, so the
        # test runner could not invoke it (TypeError on call).
        db_acc = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)

        db_dpl = self.fake_deployable
        dpl = objects.Deployable(context=self.context,
                                 **db_dpl)
        dpl.accelerator_id = acc_get.id
        dpl.create(self.context)
        self.assertEqual(db_dpl['uuid'], dpl.uuid)

    def test_get(self):
        """get() by uuid returns the created deployable."""
        db_acc = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        db_dpl = self.fake_deployable
        dpl = objects.Deployable(context=self.context,
                                 **db_dpl)

        dpl.accelerator_id = acc_get.id
        dpl.create(self.context)
        dpl_get = objects.Deployable.get(self.context, dpl.uuid)
        self.assertEqual(dpl_get.uuid, dpl.uuid)

    def test_get_by_filter(self):
        """get_by_filter() with a uuid filter returns the deployable."""
        db_acc = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        db_dpl = self.fake_deployable
        dpl = objects.Deployable(context=self.context,
                                 **db_dpl)

        dpl.accelerator_id = acc_get.id
        dpl.create(self.context)
        query = {"uuid": dpl['uuid']}
        dpl_get_list = objects.Deployable.get_by_filter(self.context, query)

        self.assertEqual(dpl_get_list[0].uuid, dpl.uuid)

    def test_save(self):
        """save() persists an attribute change visible to a later get()."""
        db_acc = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        db_dpl = self.fake_deployable
        dpl = objects.Deployable(context=self.context,
                                 **db_dpl)

        dpl.accelerator_id = acc_get.id
        dpl.create(self.context)
        dpl.host = 'test_save'
        dpl.save(self.context)
        dpl_get = objects.Deployable.get(self.context, dpl.uuid)
        self.assertEqual(dpl_get.host, 'test_save')

    def test_destroy(self):
        """destroy() removes the record; a later get() raises NotFound."""
        db_acc = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        db_dpl = self.fake_deployable
        dpl = objects.Deployable(context=self.context,
                                 **db_dpl)

        dpl.accelerator_id = acc_get.id
        dpl.create(self.context)
        self.assertEqual(db_dpl['uuid'], dpl.uuid)
        dpl.destroy(self.context)
        self.assertRaises(exception.DeployableNotFound,
                          objects.Deployable.get, self.context,
                          dpl.uuid)
+
+
class TestDeployableObject(test_objects._LocalTest,
                           _TestDeployableObject):
    """Deployable object tests run in the in-process (_LocalTest) mode."""

    def _test_save_objectfield_fk_constraint_fails(self, foreign_key,
                                                   expected_exception):
        """Verify save() raises expected_exception when a per-field
        ``_save_<field>`` hook hits a foreign-key DBReferenceError.
        """
        error = db_exc.DBReferenceError('table', 'constraint', foreign_key,
                                        'key_table')
        # Prevent lazy-loading any fields, results in InstanceNotFound
        deployable = fake_deployable.fake_deployable_obj(self.context)
        # Only fields that have a dedicated _save_<field> method participate.
        fields_with_save_methods = [field for field in deployable.fields
                                    if hasattr(deployable, '_save_%s' % field)]
        for field in fields_with_save_methods:
            # NOTE(review): the patch decorators are re-applied for each
            # freshly defined _test closure on every loop iteration.
            @mock.patch.object(deployable, '_save_%s' % field)
            @mock.patch.object(deployable, 'obj_attr_is_set')
            def _test(mock_is_set, mock_save_field):
                mock_is_set.return_value = True
                mock_save_field.side_effect = error
                # Mark exactly this field dirty so save() calls its hook.
                deployable.obj_reset_changes(fields=[field])
                deployable._changed_fields.add(field)
                self.assertRaises(expected_exception, deployable.save)
                deployable.obj_reset_changes(fields=[field])
            _test()
\ No newline at end of file diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_object.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_object.py new file mode 100644 index 0000000..35b574d --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_object.py @@ -0,0 +1,226 @@ +# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import contextlib
+import copy
+import datetime
+import inspect
+import os
+import pprint
+
+import fixtures
+import mock
+from oslo_log import log
+from oslo_utils import timeutils
+from oslo_versionedobjects import base as ovo_base
+from oslo_versionedobjects import exception as ovo_exc
+from oslo_versionedobjects import fixture
+import six
+
+from oslo_context import context
+
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg.objects import fields
+from cyborg import tests as test
+
+
+LOG = log.getLogger(__name__)
+
+
class MyOwnedObject(base.CyborgPersistentObject, base.CyborgObject):
    """Minimal owned object; used as a relation target in MyObj tests."""
    VERSION = '1.0'
    fields = {'baz': fields.IntegerField()}
+
+
class MyObj(base.CyborgPersistentObject, base.CyborgObject,
            base.CyborgObjectDictCompat):
    """Feature-rich test object: exercises defaults, read-only fields,
    object relations, mutable defaults and version compatibility.
    """
    VERSION = '1.6'
    fields = {'foo': fields.IntegerField(default=1),
              'bar': fields.StringField(),
              'missing': fields.StringField(),
              'readonly': fields.IntegerField(read_only=True),
              'rel_object': fields.ObjectField('MyOwnedObject', nullable=True),
              'rel_objects': fields.ListOfObjectsField('MyOwnedObject',
                                                       nullable=True),
              # The mutable [] default is intentional test material here.
              'mutable_default': fields.ListOfStringsField(default=[]),
              }

    @staticmethod
    def _from_db_object(context, obj, db_obj):
        # Note: ignores the passed-in ``obj`` and builds a fresh MyObj;
        # ``readonly`` is always hydrated to 1.
        self = MyObj()
        self.foo = db_obj['foo']
        self.bar = db_obj['bar']
        self.missing = db_obj['missing']
        self.readonly = 1
        self._context = context
        return self

    def obj_load_attr(self, attrname):
        # Any lazily-loaded attribute resolves to the sentinel 'loaded!'.
        setattr(self, attrname, 'loaded!')

    def query(cls, context):
        # NOTE(review): takes ``cls`` but carries no @classmethod
        # decorator -- confirm how callers are expected to invoke this.
        obj = cls(context=context, foo=1, bar='bar')
        obj.obj_reset_changes()
        return obj

    def marco(self):
        """Canned response used to verify method dispatch."""
        return 'polo'

    def _update_test(self):
        self.bar = 'updated'

    def save(self):
        # Pretend-save: only clears the changed-fields tracking.
        self.obj_reset_changes()

    def refresh(self):
        # Pretend-refresh: overwrite with fixed values, then mark clean.
        self.foo = 321
        self.bar = 'refreshed'
        self.obj_reset_changes()

    def modify_save_modify(self):
        # Dirty 'bar', save (clears changes), then dirty 'foo'/'rel_object'
        # so only the post-save changes remain tracked.
        self.bar = 'meow'
        self.save()
        self.foo = 42
        self.rel_object = MyOwnedObject(baz=42)

    def obj_make_compatible(self, primitive, target_version):
        super(MyObj, self).obj_make_compatible(primitive, target_version)
        # NOTE(danms): Simulate an older version that had a different
        # format for the 'bar' attribute
        if target_version == '1.1' and 'bar' in primitive:
            primitive['bar'] = 'old%s' % primitive['bar']
+
+
class RandomMixInWithNoFields(object):
    """Used to test object inheritance using a mixin that has no fields."""
    pass
+
+
# register_if(False): keep this subclass out of the object registry so it
# is only instantiable directly from the tests.
@base.CyborgObjectRegistry.register_if(False)
class TestSubclassedObject(RandomMixInWithNoFields, MyObj):
    fields = {'new_field': fields.StringField()}
+
+
class TestObjToPrimitive(test.base.TestCase):
    """Exercise base.obj_to_primitive over lists, dicts, nested objects
    and IP-address fields.
    """

    def test_obj_to_primitive_list(self):
        """A list-of-objects field flattens to a list of primitives."""
        @base.CyborgObjectRegistry.register_if(False)
        class MyObjElement(base.CyborgObject):
            fields = {'foo': fields.IntegerField()}

            def __init__(self, foo):
                super(MyObjElement, self).__init__()
                self.foo = foo

        @base.CyborgObjectRegistry.register_if(False)
        class MyList(base.ObjectListBase, base.CyborgObject):
            fields = {'objects': fields.ListOfObjectsField('MyObjElement')}

        obj_list = MyList()
        obj_list.objects = [MyObjElement(v) for v in (1, 2, 3)]
        self.assertEqual([1, 2, 3],
                         [item['foo']
                          for item in base.obj_to_primitive(obj_list)])

    def test_obj_to_primitive_dict(self):
        """A flat object converts to a plain field->value dict."""
        base.CyborgObjectRegistry.register(MyObj)
        obj = MyObj(foo=1, bar='foo')
        self.assertEqual({'foo': 1, 'bar': 'foo'},
                         base.obj_to_primitive(obj))

    def test_obj_to_primitive_recursive(self):
        """Nested objects inside a list are converted recursively."""
        base.CyborgObjectRegistry.register(MyObj)

        class MyList(base.ObjectListBase, base.CyborgObject):
            fields = {'objects': fields.ListOfObjectsField('MyObj')}

        obj_list = MyList(objects=[MyObj(), MyObj()])
        for index, element in enumerate(obj_list):
            element.foo = index
        self.assertEqual([{'foo': 0}, {'foo': 1}],
                         base.obj_to_primitive(obj_list))

    def test_obj_to_primitive_with_ip_addr(self):
        """IP address/network fields serialize to their string forms."""
        @base.CyborgObjectRegistry.register_if(False)
        class TestObject(base.CyborgObject):
            fields = {'addr': fields.IPAddressField(),
                      'cidr': fields.IPNetworkField()}

        target = TestObject(addr='1.2.3.4', cidr='1.1.1.1/16')
        self.assertEqual({'addr': '1.2.3.4', 'cidr': '1.1.1.1/16'},
                         base.obj_to_primitive(target))
+
+
def compare_obj(test, obj, db_obj, subs=None, allow_missing=None,
                comparators=None):
    """Compare a CyborgObject and a dict-like database object.

    TZ-aware datetimes on the object side are made naive before the
    comparison, and every field of the object is checked in turn.

    :param:test: The TestCase doing the comparison
    :param:obj: The CyborgObject to examine
    :param:db_obj: The dict-like database object to use as reference
    :param:subs: A dict of objkey=dbkey field substitutions
    :param:allow_missing: A list of fields that may not be in db_obj
    :param:comparators: Map of comparator functions to use for certain fields
    """
    subs = {} if subs is None else subs
    allow_missing = [] if allow_missing is None else allow_missing
    comparators = {} if comparators is None else comparators

    for key in obj.fields:
        # Optional fields that were never set are simply skipped.
        if key in allow_missing and not obj.obj_attr_is_set(key):
            continue
        obj_val = getattr(obj, key)
        db_val = db_obj[subs.get(key, key)]
        if isinstance(obj_val, datetime.datetime):
            # Strip tzinfo so aware object datetimes compare against the
            # naive values stored in the DB record.
            obj_val = obj_val.replace(tzinfo=None)

        if key in comparators:
            comparators[key](db_val, obj_val)
        else:
            test.assertEqual(db_val, obj_val)
+
+
class _BaseTestCase(test.base.TestCase):
    """Common setup for object tests: a fake request context plus
    registration of the module-local test objects.
    """

    def setUp(self):
        super(_BaseTestCase, self).setUp()
        self.user_id = 'fake-user'
        self.project_id = 'fake-project'
        self.context = context.RequestContext(self.user_id, self.project_id)

        # Register the local fixtures so registry lookups can resolve them.
        base.CyborgObjectRegistry.register(MyObj)
        base.CyborgObjectRegistry.register(MyOwnedObject)

    def compare_obj(self, obj, db_obj, subs=None, allow_missing=None,
                    comparators=None):
        """Delegate to the module-level compare_obj with self as the test."""
        compare_obj(self, obj, db_obj, subs=subs, allow_missing=allow_missing,
                    comparators=comparators)

    def str_comparator(self, expected, obj_val):
        """Compare an object field to a string in the db by performing
        a simple coercion on the object field value.
        """
        self.assertEqual(expected, str(obj_val))
+
+
class _LocalTest(_BaseTestCase):
    """Variant of _BaseTestCase for in-process ("local") object tests."""

    def setUp(self):
        super(_LocalTest, self).setUp()
\ No newline at end of file diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_physical_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_physical_function.py new file mode 100644 index 0000000..2fa2ab1 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_physical_function.py @@ -0,0 +1,186 @@ +import mock
+import netaddr
+from oslo_db import exception as db_exc
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+from oslo_context import context
+
+from cyborg import db
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg import tests as test
+from cyborg.tests.unit import fake_physical_function
+from cyborg.tests.unit import fake_virtual_function
+from cyborg.tests.unit import fake_accelerator
+from cyborg.tests.unit.objects import test_objects
+from cyborg.tests.unit.db.base import DbTestCase
+
+
class _TestPhysicalFunctionObject(DbTestCase):
    """Tests for objects.PhysicalFunction against the test database."""

    @property
    def fake_physical_function(self):
        # Fresh fake PF DB record per access (id pinned to 1).
        db_pf = fake_physical_function.fake_db_physical_function(id=1)
        return db_pf

    @property
    def fake_virtual_function(self):
        # Fresh fake VF DB record per access (id pinned to 3).
        db_vf = fake_virtual_function.fake_db_virtual_function(id=3)
        return db_vf

    @property
    def fake_accelerator(self):
        # Fresh fake accelerator DB record per access (id pinned to 2).
        db_acc = fake_accelerator.fake_db_accelerator(id=2)
        return db_acc

    def test_create(self):
        """A created PF keeps the uuid of its source record."""
        db_pf = self.fake_physical_function
        db_acc = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        pf = objects.PhysicalFunction(context=self.context,
                                      **db_pf)
        pf.accelerator_id = acc_get.id
        pf.create(self.context)

        self.assertEqual(db_pf['uuid'], pf.uuid)

    def test_get(self):
        """get() by uuid returns the created PF."""
        db_pf = self.fake_physical_function
        db_acc = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        pf = objects.PhysicalFunction(context=self.context,
                                      **db_pf)
        pf.accelerator_id = acc_get.id
        pf.create(self.context)
        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
        self.assertEqual(pf_get.uuid, pf.uuid)

    def test_get_by_filter(self):
        """Filtering by vendor returns the PF with its attached VFs."""
        db_acc = self.fake_accelerator
        db_pf = self.fake_physical_function
        db_vf = self.fake_virtual_function
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        pf = objects.PhysicalFunction(context=self.context,
                                      **db_pf)

        pf.accelerator_id = acc_get.id
        pf.create(self.context)
        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
        vf = objects.VirtualFunction(context=self.context,
                                     **db_vf)
        vf.accelerator_id = pf_get.accelerator_id
        vf.create(self.context)
        vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
        # Attach the VF to the PF, then persist the association.
        pf_get.add_vf(vf_get)

        pf_get.save(self.context)

        query = {"vendor": pf['vendor']}
        pf_get_list = objects.PhysicalFunction.get_by_filter(self.context,
                                                             query)

        self.assertEqual(len(pf_get_list), 1)
        self.assertEqual(pf_get_list[0].uuid, pf.uuid)
        self.assertEqual(objects.PhysicalFunction, type(pf_get_list[0]))
        self.assertEqual(objects.VirtualFunction,
                         type(pf_get_list[0].virtual_function_list[0]))
        self.assertEqual(pf_get_list[0].virtual_function_list[0].uuid,
                         vf.uuid)

    def test_save(self):
        """save() persists an attribute change visible to a later get()."""
        db_pf = self.fake_physical_function
        db_acc = self.fake_accelerator

        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        pf = objects.PhysicalFunction(context=self.context,
                                      **db_pf)
        pf.accelerator_id = acc_get.id
        pf.create(self.context)
        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
        pf_get.host = 'test_save'

        pf_get.save(self.context)
        pf_get_2 = objects.PhysicalFunction.get(self.context, pf.uuid)
        self.assertEqual(pf_get_2.host, 'test_save')

    def test_destroy(self):
        """destroy() removes the record; a later get() raises NotFound."""
        db_pf = self.fake_physical_function
        db_acc = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        pf = objects.PhysicalFunction(context=self.context,
                                      **db_pf)
        pf.accelerator_id = acc_get.id
        pf.create(self.context)
        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
        self.assertEqual(db_pf['uuid'], pf_get.uuid)
        pf_get.destroy(self.context)
        # NOTE(review): PF lookups raise DeployableNotFound here --
        # presumably PhysicalFunction is backed by the deployable table.
        self.assertRaises(exception.DeployableNotFound,
                          objects.PhysicalFunction.get, self.context,
                          pf_get['uuid'])

    def test_add_vf(self):
        """add_vf() + save() makes the VF visible on a re-fetched PF."""
        db_pf = self.fake_physical_function
        db_vf = self.fake_virtual_function
        db_acc = self.fake_accelerator
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        pf = objects.PhysicalFunction(context=self.context,
                                      **db_pf)
        pf.accelerator_id = acc_get.id
        pf.create(self.context)
        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)

        vf = objects.VirtualFunction(context=self.context,
                                     **db_vf)
        vf.accelerator_id = pf_get.accelerator_id
        vf.create(self.context)
        vf_get = objects.VirtualFunction.get(self.context, vf.uuid)

        pf_get.add_vf(vf_get)

        pf_get.save(self.context)
        pf_get_2 = objects.PhysicalFunction.get(self.context, pf.uuid)

        self.assertEqual(db_vf['uuid'],
                         pf_get_2.virtual_function_list[0].uuid)
+
+
class TestPhysicalFunctionObject(test_objects._LocalTest,
                                 _TestPhysicalFunctionObject):
    """PF object tests run in the in-process (_LocalTest) mode."""

    def _test_save_objectfield_fk_constraint_fails(self, foreign_key,
                                                   expected_exception):
        """Verify save() raises expected_exception when a per-field
        ``_save_<field>`` hook hits a foreign-key DBReferenceError.
        """
        error = db_exc.DBReferenceError('table', 'constraint', foreign_key,
                                        'key_table')
        # Prevent lazy-loading any fields, results in InstanceNotFound
        # Bug fix: the fixture helper is named fake_physical_function_obj;
        # the old call to fake_physical_function.physical_function_obj
        # raised AttributeError.
        pf = fake_physical_function.fake_physical_function_obj(self.context)
        # Only fields that have a dedicated _save_<field> method participate.
        fields_with_save_methods = [field for field in pf.fields
                                    if hasattr(pf, '_save_%s' % field)]
        for field in fields_with_save_methods:
            @mock.patch.object(pf, '_save_%s' % field)
            @mock.patch.object(pf, 'obj_attr_is_set')
            def _test(mock_is_set, mock_save_field):
                mock_is_set.return_value = True
                mock_save_field.side_effect = error
                # Mark exactly this field dirty so save() calls its hook.
                pf.obj_reset_changes(fields=[field])
                pf._changed_fields.add(field)
                self.assertRaises(expected_exception, pf.save)
                pf.obj_reset_changes(fields=[field])
            _test()
\ No newline at end of file diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_virtual_function.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_virtual_function.py new file mode 100644 index 0000000..fea300f --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/test_virtual_function.py @@ -0,0 +1,202 @@ +# Copyright 2018 Huawei Technologies Co.,LTD.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+import mock
+import netaddr
+from oslo_db import exception as db_exc
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+from oslo_context import context
+
+from cyborg import db
+from cyborg.common import exception
+from cyborg import objects
+from cyborg.objects import base
+from cyborg import tests as test
+from cyborg.tests.unit import fake_physical_function
+from cyborg.tests.unit import fake_virtual_function
+from cyborg.tests.unit import fake_accelerator
+from cyborg.tests.unit.objects import test_objects
+from cyborg.tests.unit.db.base import DbTestCase
+
+
class _TestVirtualFunctionObject(DbTestCase):
    """Tests for objects.VirtualFunction against the test database."""

    @property
    def fake_accelerator(self):
        # Fresh fake accelerator DB record per access (id pinned to 1).
        db_acc = fake_accelerator.fake_db_accelerator(id=1)
        return db_acc

    @property
    def fake_virtual_function(self):
        # Fresh fake VF DB record per access (id pinned to 2).
        db_vf = fake_virtual_function.fake_db_virtual_function(id=2)
        return db_vf

    @property
    def fake_physical_function(self):
        # Fresh fake PF DB record per access (id pinned to 3).
        db_pf = fake_physical_function.fake_db_physical_function(id=3)
        return db_pf

    def test_create(self):
        """A created VF keeps the uuid of its source record."""
        db_acc = self.fake_accelerator
        db_vf = self.fake_virtual_function

        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        vf = objects.VirtualFunction(context=self.context,
                                     **db_vf)
        vf.accelerator_id = acc_get.id
        vf.create(self.context)

        self.assertEqual(db_vf['uuid'], vf.uuid)

    def test_get(self):
        """get() by uuid returns the created VF."""
        db_vf = self.fake_virtual_function
        db_acc = self.fake_accelerator

        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        vf = objects.VirtualFunction(context=self.context,
                                     **db_vf)
        vf.accelerator_id = acc_get.id
        vf.create(self.context)
        vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
        self.assertEqual(vf_get.uuid, vf.uuid)

    def test_get_by_filter(self):
        """Filtering VFs by the parent PF's vendor finds the attached VF."""
        db_acc = self.fake_accelerator
        db_pf = self.fake_physical_function
        db_vf = self.fake_virtual_function
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        pf = objects.PhysicalFunction(context=self.context,
                                      **db_pf)

        pf.accelerator_id = acc_get.id
        pf.create(self.context)
        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
        vf = objects.VirtualFunction(context=self.context,
                                     **db_vf)
        vf.accelerator_id = pf_get.accelerator_id
        vf.create(self.context)
        vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
        # Attach the VF to the PF, then persist the association.
        pf_get.add_vf(vf_get)
        pf_get.save(self.context)

        query = {"vendor": pf_get['vendor']}
        vf_get_list = objects.VirtualFunction.get_by_filter(self.context,
                                                            query)

        self.assertEqual(len(vf_get_list), 1)
        self.assertEqual(vf_get_list[0].uuid, vf.uuid)
        self.assertEqual(objects.VirtualFunction, type(vf_get_list[0]))
        # NOTE(review): tautological assertion -- asserts nothing; consider
        # removing it or replacing it with a meaningful check.
        self.assertEqual(1, 1)

    def test_get_by_filter2(self):
        """Filter PFs by uuid when two PFs exist under one accelerator."""
        db_acc = self.fake_accelerator

        db_pf = self.fake_physical_function
        db_vf = self.fake_virtual_function

        db_pf2 = self.fake_physical_function
        db_vf2 = self.fake_virtual_function
        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        pf = objects.PhysicalFunction(context=self.context,
                                      **db_pf)

        pf.accelerator_id = acc_get.id
        pf.create(self.context)
        pf_get = objects.PhysicalFunction.get(self.context, pf.uuid)
        pf2 = objects.PhysicalFunction(context=self.context,
                                       **db_pf2)

        pf2.accelerator_id = acc_get.id
        pf2.create(self.context)
        pf_get2 = objects.PhysicalFunction.get(self.context, pf2.uuid)
        query = {"uuid": pf2.uuid}

        pf_get_list = objects.PhysicalFunction.get_by_filter(self.context,
                                                             query)
        # NOTE(review): tautological assertion -- the filter result
        # (pf_get_list) is never checked; db_vf/db_vf2/pf_get/pf_get2 are
        # also unused. This test currently only verifies nothing raises.
        self.assertEqual(1, 1)

    def test_save(self):
        """save() persists an attribute change visible to a later get()."""
        db_vf = self.fake_virtual_function
        db_acc = self.fake_accelerator

        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        vf = objects.VirtualFunction(context=self.context,
                                     **db_vf)
        vf.accelerator_id = acc_get.id
        vf.create(self.context)
        vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
        vf_get.host = 'test_save'
        vf_get.save(self.context)
        vf_get_2 = objects.VirtualFunction.get(self.context, vf.uuid)
        self.assertEqual(vf_get_2.host, 'test_save')

    def test_destroy(self):
        """destroy() removes the record; a later get() raises NotFound."""
        db_vf = self.fake_virtual_function
        db_acc = self.fake_accelerator

        acc = objects.Accelerator(context=self.context,
                                  **db_acc)
        acc.create(self.context)
        acc_get = objects.Accelerator.get(self.context, acc.uuid)
        vf = objects.VirtualFunction(context=self.context,
                                     **db_vf)
        vf.accelerator_id = acc_get.id
        vf.create(self.context)
        vf_get = objects.VirtualFunction.get(self.context, vf.uuid)
        self.assertEqual(db_vf['uuid'], vf_get.uuid)
        vf_get.destroy(self.context)
        # NOTE(review): VF lookups raise DeployableNotFound here --
        # presumably VirtualFunction is backed by the deployable table.
        self.assertRaises(exception.DeployableNotFound,
                          objects.VirtualFunction.get, self.context,
                          vf_get['uuid'])
+
+
class TestVirtualFunctionObject(test_objects._LocalTest,
                                _TestVirtualFunctionObject):
    """VF object tests run in the in-process (_LocalTest) mode."""

    def _test_save_objectfield_fk_constraint_fails(self, foreign_key,
                                                   expected_exception):
        """Verify save() raises expected_exception when a per-field
        ``_save_<field>`` hook hits a foreign-key DBReferenceError.
        """
        error = db_exc.DBReferenceError('table', 'constraint', foreign_key,
                                        'key_table')
        # Prevent lazy-loading any fields, results in InstanceNotFound
        # Bug fix: the fixture helper is named fake_virtual_function_obj;
        # the old call to fake_virtual_function.virtual_function_obj
        # raised AttributeError.
        vf = fake_virtual_function.fake_virtual_function_obj(self.context)
        # Only fields that have a dedicated _save_<field> method participate.
        fields_with_save_methods = [field for field in vf.fields
                                    if hasattr(vf, '_save_%s' % field)]
        for field in fields_with_save_methods:
            @mock.patch.object(vf, '_save_%s' % field)
            @mock.patch.object(vf, 'obj_attr_is_set')
            def _test(mock_is_set, mock_save_field):
                mock_is_set.return_value = True
                mock_save_field.side_effect = error
                # Mark exactly this field dirty so save() calls its hook.
                vf.obj_reset_changes(fields=[field])
                vf._changed_fields.add(field)
                self.assertRaises(expected_exception, vf.save)
                vf.obj_reset_changes(fields=[field])
            _test()
\ No newline at end of file diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/utils.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/utils.py new file mode 100644 index 0000000..99a1e83 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/objects/utils.py @@ -0,0 +1,41 @@ +# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Cyborg object test utilities."""
+
+from cyborg import objects
+from cyborg.tests.unit.db import utils as db_utils
+
+
def get_test_accelerator(ctxt, **kw):
    """Build (but do not persist) a test Accelerator object.

    The returned object keeps all attributes marked as changed, so a
    later ``create()`` will commit every one of them to the database.
    """
    attrs = db_utils.get_test_accelerator(**kw)
    return objects.Accelerator(ctxt, **attrs)
+
+
def create_test_accelerator(ctxt, **kw):
    """Create a test accelerator in the DB and return its object.

    Delegates attribute construction to :func:`get_test_accelerator`,
    then commits the object before handing it back.
    """
    obj_acc = get_test_accelerator(ctxt, **kw)
    obj_acc.create(ctxt)
    return obj_acc
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/policy_fixture.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/policy_fixture.py new file mode 100644 index 0000000..6fad440 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/policy_fixture.py @@ -0,0 +1,44 @@ +# Copyright 2017 Huawei Technologies Co.,LTD.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import fixtures
+from oslo_config import cfg
+from oslo_policy import opts as policy_opts
+
+from cyborg.common import policy as cyborg_policy
+
CONF = cfg.CONF

# Empty JSON policy document: with no rules defined, every policy check
# falls back to its registered default, so tests run under default policy
# unless a test overrides the file contents.
policy_data = """
{

}
"""
+
+
class PolicyFixture(fixtures.Fixture):
    """Point oslo.policy at a temporary, empty policy file for tests.

    Writes ``policy_data`` into a throwaway ``policy.json``, overrides
    the ``[oslo_policy] policy_file`` option to reference it, and forces
    cyborg to rebuild its enforcer from that file.
    """

    def setUp(self):
        super(PolicyFixture, self).setUp()
        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file_name = os.path.join(self.policy_dir.path,
                                             'policy.json')
        with open(self.policy_file_name, 'w') as policy_file:
            policy_file.write(policy_data)
        policy_opts.set_defaults(CONF)
        CONF.set_override('policy_file', self.policy_file_name, 'oslo_policy')
        # Drop any cached enforcer so the next get_enforcer() call reads
        # the temporary policy file written above; register a cleanup
        # that clears that enforcer when the fixture tears down.
        cyborg_policy._ENFORCER = None
        self.addCleanup(cyborg_policy.get_enforcer().clear)
diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/__init__.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/__init__.py diff --git a/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/test_placement_client.py b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/test_placement_client.py new file mode 100644 index 0000000..131e314 --- /dev/null +++ b/cyborg_enhancement/mitaka_version/cyborg/cyborg/tests/unit/services/test_placement_client.py @@ -0,0 +1,123 @@ +# Copyright (c) 2018 Lenovo Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from cyborg.tests import base
+import mock
+from cyborg.services import report as placement_client
+from oslo_utils import uuidutils
+from cyborg.common import exception as c_exc
+from keystoneauth1 import exceptions as ks_exc
+from oslo_config import cfg
+
+
class PlacementAPIClientTestCase(base.DietTestCase):
    """Test the Placement API client.

    ``setUp`` patches keystoneauth's auth loading and ``Session.request``
    so no real HTTP traffic or credentials are needed; each test then
    asserts on the URL/method/payload the client hands to the session.
    """

    def setUp(self):
        super(PlacementAPIClientTestCase, self).setUp()
        self.mock_load_auth_p = mock.patch(
            'keystoneauth1.loading.load_auth_from_conf_options')
        self.mock_load_auth = self.mock_load_auth_p.start()
        # BUG FIX: the patches were started but never stopped, leaking
        # the mocks into every test that runs afterwards in the same
        # process. Register cleanups so each patch is undone at teardown.
        self.addCleanup(self.mock_load_auth_p.stop)
        self.mock_request_p = mock.patch(
            'keystoneauth1.session.Session.request')
        self.mock_request = self.mock_request_p.start()
        self.addCleanup(self.mock_request_p.stop)
        self.client = placement_client.SchedulerReportClient()

    @mock.patch('keystoneauth1.session.Session')
    @mock.patch('keystoneauth1.loading.load_auth_from_conf_options')
    def test_constructor(self, load_auth_mock, ks_sess_mock):
        """The client builds its session from the [placement] conf group."""
        placement_client.SchedulerReportClient()

        load_auth_mock.assert_called_once_with(cfg.CONF, 'placement')
        ks_sess_mock.assert_called_once_with(auth=load_auth_mock.return_value,
                                             cert=None,
                                             timeout=None,
                                             verify=True)

    def test_create_resource_provider(self):
        """POST /resource_providers with the provider payload as JSON."""
        expected_payload = 'fake_resource_provider'
        self.client.create_resource_provider(expected_payload)
        e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
        expected_url = '/resource_providers'
        self.mock_request.assert_called_once_with(expected_url, 'POST',
                                                  endpoint_filter=e_filter,
                                                  json=expected_payload)

    def test_delete_resource_provider(self):
        """DELETE /resource_providers/<uuid>."""
        rp_uuid = uuidutils.generate_uuid()
        self.client.delete_resource_provider(rp_uuid)
        e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
        expected_url = '/resource_providers/%s' % rp_uuid
        self.mock_request.assert_called_once_with(expected_url, 'DELETE',
                                                  endpoint_filter=e_filter)

    def test_create_inventory(self):
        """POST /resource_providers/<uuid>/inventories with the payload."""
        expected_payload = 'fake_inventory'
        rp_uuid = uuidutils.generate_uuid()
        e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
        self.client.create_inventory(rp_uuid, expected_payload)
        expected_url = '/resource_providers/%s/inventories' % rp_uuid
        self.mock_request.assert_called_once_with(expected_url, 'POST',
                                                  endpoint_filter=e_filter,
                                                  json=expected_payload)

    def test_get_inventory(self):
        """GET /resource_providers/<uuid>/inventories/<resource_class>."""
        rp_uuid = uuidutils.generate_uuid()
        e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
        resource_class = 'fake_resource_class'
        self.client.get_inventory(rp_uuid, resource_class)
        expected_url = '/resource_providers/%s/inventories/%s' % (
            rp_uuid, resource_class)
        self.mock_request.assert_called_once_with(expected_url, 'GET',
                                                  endpoint_filter=e_filter)

    def _test_get_inventory_not_found(self, details, expected_exception):
        # The client inspects the NotFound details string to decide which
        # cyborg exception (if any) to translate it into.
        rp_uuid = uuidutils.generate_uuid()
        resource_class = 'fake_resource_class'
        self.mock_request.side_effect = ks_exc.NotFound(details=details)
        self.assertRaises(expected_exception, self.client.get_inventory,
                          rp_uuid, resource_class)

    def test_get_inventory_not_found_no_resource_provider(self):
        self._test_get_inventory_not_found(
            "No resource provider with uuid",
            c_exc.PlacementResourceProviderNotFound)

    def test_get_inventory_not_found_no_inventory(self):
        self._test_get_inventory_not_found(
            "No inventory of class", c_exc.PlacementInventoryNotFound)

    def test_get_inventory_not_found_unknown_cause(self):
        # An unrecognized NotFound is re-raised untranslated.
        self._test_get_inventory_not_found("Unknown cause", ks_exc.NotFound)

    def test_update_inventory(self):
        """PUT /resource_providers/<uuid>/inventories/<resource_class>."""
        expected_payload = 'fake_inventory'
        rp_uuid = uuidutils.generate_uuid()
        e_filter = {'region_name': mock.ANY, 'service_type': 'placement'}
        resource_class = 'fake_resource_class'
        self.client.update_inventory(rp_uuid, expected_payload, resource_class)
        expected_url = '/resource_providers/%s/inventories/%s' % (
            rp_uuid, resource_class)
        self.mock_request.assert_called_once_with(expected_url, 'PUT',
                                                  endpoint_filter=e_filter,
                                                  json=expected_payload)

    def test_update_inventory_conflict(self):
        """A placement 409 is translated to PlacementInventoryUpdateConflict."""
        rp_uuid = uuidutils.generate_uuid()
        expected_payload = 'fake_inventory'
        resource_class = 'fake_resource_class'
        self.mock_request.side_effect = ks_exc.Conflict
        self.assertRaises(c_exc.PlacementInventoryUpdateConflict,
                          self.client.update_inventory, rp_uuid,
                          expected_payload, resource_class)