Diffstat (limited to 'networking_sfc/db')
 networking_sfc/db/__init__.py                                         |   0
 networking_sfc/db/flowclassifier_db.py                                | 211
 networking_sfc/db/migration/README                                    |   3
 networking_sfc/db/migration/__init__.py                               |   0
 networking_sfc/db/migration/alembic_migrations/__init__.py            |   0
 networking_sfc/db/migration/alembic_migrations/env.py                 |  88
 networking_sfc/db/migration/alembic_migrations/script.py.mako         |  36
 networking_sfc/db/migration/alembic_migrations/versions/HEADS         |   2
 .../versions/liberty/contract/48072cb59133_initial.py                 |  33
 .../versions/liberty/expand/24fc7241aa5_initial.py                    |  33
 .../versions/liberty/expand/5a475fc853e6_ovs_data_model.py            |  87
 .../versions/liberty/expand/9768e6a66c9_flowclassifier_data_model.py  |  61
 .../versions/liberty/expand/c3e178d4a985_sfc_data_model.py            | 119
 .../alembic_migrations/versions/start_networking_sfc.py               |  30
 networking_sfc/db/migration/models/__init__.py                        |   0
 networking_sfc/db/migration/models/head.py                            |  23
 networking_sfc/db/sfc_db.py                                           | 553
17 files changed, 1279 insertions, 0 deletions
diff --git a/networking_sfc/db/__init__.py b/networking_sfc/db/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/networking_sfc/db/__init__.py
diff --git a/networking_sfc/db/flowclassifier_db.py b/networking_sfc/db/flowclassifier_db.py
new file mode 100644
index 0000000..44e1c89
--- /dev/null
+++ b/networking_sfc/db/flowclassifier_db.py
@@ -0,0 +1,211 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+
+from oslo_log import helpers as log_helpers
+from oslo_log import log as logging
+from oslo_utils import uuidutils
+
+import sqlalchemy as sa
+from sqlalchemy import orm
+from sqlalchemy.orm.collections import attribute_mapped_collection
+from sqlalchemy.orm import exc
+
+from neutron.common import constants as const
+from neutron.db import common_db_mixin
+from neutron.db import model_base
+from neutron.db import models_v2
+from neutron.i18n import _LI
+
+from networking_sfc.extensions import flowclassifier as fc_ext
+
+LOG = logging.getLogger(__name__)
+
+
+class L7Parameter(model_base.BASEV2):
+    """Represents a L7 parameter."""
+    __tablename__ = 'sfc_flow_classifier_l7_parameters'
+    keyword = sa.Column(sa.String(255), primary_key=True)
+    value = sa.Column(sa.String(255))
+    classifier_id = sa.Column(
+        sa.String(36),
+        sa.ForeignKey('sfc_flow_classifiers.id', ondelete='CASCADE'),
+        primary_key=True)
+
+
+class FlowClassifier(model_base.BASEV2, models_v2.HasId,
+                     models_v2.HasTenant):
+    """Represents a v2 neutron flow classifier."""
+    __tablename__ = 'sfc_flow_classifiers'
+
+    name = sa.Column(sa.String(255))
+    ethertype = sa.Column(sa.String(40))
+    protocol = sa.Column(sa.String(40))
+    description = sa.Column(sa.String(255))
+    source_port_range_min = sa.Column(sa.Integer)
+    source_port_range_max = sa.Column(sa.Integer)
+    destination_port_range_min = sa.Column(sa.Integer)
+    destination_port_range_max = sa.Column(sa.Integer)
+    source_ip_prefix = sa.Column(sa.String(255))
+    destination_ip_prefix = sa.Column(sa.String(255))
+    l7_parameters = orm.relationship(
+        L7Parameter,
+        collection_class=attribute_mapped_collection('keyword'),
+        cascade='all, delete-orphan')
+
+
+class FlowClassifierDbPlugin(fc_ext.FlowClassifierPluginBase,
+                             common_db_mixin.CommonDbMixin):
+
+    def _check_port_range_valid(self, port_range_min,
+                                port_range_max,
+                                protocol):
+        if (
+            port_range_min is not None and
+            port_range_max is not None and
+            port_range_min > port_range_max
+        ):
+            raise fc_ext.FlowClassifierInvalidPortRange(
+                port_range_min=port_range_min,
+                port_range_max=port_range_max
+            )
+        if port_range_min is not None or port_range_max is not None:
+            if protocol not in [const.PROTO_NAME_TCP, const.PROTO_NAME_UDP]:
+                raise fc_ext.FlowClassifierProtocolRequiredWithPorts()
+
+    def _get_fixed_ip_from_port(self, context, logical_port, ip_prefix):
+        if logical_port is not None:
+            self._get_port(context, logical_port)
+        return ip_prefix
+
+    @log_helpers.log_method_call
+    def create_flow_classifier(self, context, flow_classifier):
+        fc = flow_classifier['flow_classifier']
+        tenant_id = self._get_tenant_id_for_create(context, fc)
+        # NOTE: declarative models only accept keyword arguments, so the
+        # parameters must be passed as keyword=/value= rather than
+        # positionally.
+        l7_parameters = {
+            key: L7Parameter(keyword=key, value=val)
+            for key, val in six.iteritems(fc['l7_parameters'])}
+        source_port_range_min = fc['source_port_range_min']
+        source_port_range_max = fc['source_port_range_max']
+
+        self._check_port_range_valid(source_port_range_min,
+                                     source_port_range_max,
+                                     fc['protocol'])
+        destination_port_range_min = fc['destination_port_range_min']
+        destination_port_range_max = fc['destination_port_range_max']
+        self._check_port_range_valid(destination_port_range_min,
+                                     destination_port_range_max,
+                                     fc['protocol'])
+        source_ip_prefix = fc['source_ip_prefix']
+        destination_ip_prefix = fc['destination_ip_prefix']
+
+        logical_source_port = fc['logical_source_port']
+        logical_destination_port = fc['logical_destination_port']
+        with context.session.begin(subtransactions=True):
+            source_ip_prefix = self._get_fixed_ip_from_port(
+                context, logical_source_port, source_ip_prefix)
+            destination_ip_prefix = self._get_fixed_ip_from_port(
+                context, logical_destination_port, destination_ip_prefix)
+            flow_classifier_db = FlowClassifier(
+                id=uuidutils.generate_uuid(),
+                tenant_id=tenant_id,
+                name=fc['name'],
+                description=fc['description'],
+                ethertype=fc['ethertype'],
+                protocol=fc['protocol'],
+                source_port_range_min=source_port_range_min,
+                source_port_range_max=source_port_range_max,
+                destination_port_range_min=destination_port_range_min,
+                destination_port_range_max=destination_port_range_max,
+                source_ip_prefix=source_ip_prefix,
+                destination_ip_prefix=destination_ip_prefix,
+                l7_parameters=l7_parameters
+            )
+            context.session.add(flow_classifier_db)
+        return self._make_flow_classifier_dict(flow_classifier_db)
+
+    def _make_flow_classifier_dict(self, flow_classifier, fields=None):
+        res = {
+            'id': flow_classifier['id'],
+            'name': flow_classifier['name'],
+            'description': flow_classifier['description'],
+            'tenant_id': flow_classifier['tenant_id'],
+            'ethertype': flow_classifier['ethertype'],
+            'protocol': flow_classifier['protocol'],
+            'source_port_range_min': flow_classifier['source_port_range_min'],
+            'source_port_range_max': flow_classifier['source_port_range_max'],
+            'destination_port_range_min': (
+                flow_classifier['destination_port_range_min']),
+            'destination_port_range_max': (
+                flow_classifier['destination_port_range_max']),
+            'source_ip_prefix': flow_classifier['source_ip_prefix'],
+            'destination_ip_prefix': flow_classifier['destination_ip_prefix'],
+            'l7_parameters': {
+                param['keyword']: param['value']
+                for k, param in six.iteritems(flow_classifier.l7_parameters)
+            }
+        }
+        return self._fields(res, fields)
+
+    @log_helpers.log_method_call
+    def get_flow_classifiers(self, context, filters=None, fields=None,
+                             sorts=None, limit=None, marker=None,
+                             page_reverse=False):
+        marker_obj = self._get_marker_obj(context, 'flow_classifier',
+                                          limit, marker)
+        return self._get_collection(context,
+                                    FlowClassifier,
+                                    self._make_flow_classifier_dict,
+                                    filters=filters, fields=fields,
+                                    sorts=sorts,
+                                    limit=limit, marker_obj=marker_obj,
+                                    page_reverse=page_reverse)
+
+    @log_helpers.log_method_call
+    def get_flow_classifier(self, context, id, fields=None):
+        flow_classifier = self._get_flow_classifier(context, id)
+        return self._make_flow_classifier_dict(flow_classifier, fields)
+
+    def _get_flow_classifier(self, context, id):
+        try:
+            return self._get_by_id(context, FlowClassifier, id)
+        except exc.NoResultFound:
+            raise fc_ext.FlowClassifierNotFound(id=id)
+
+    def _get_port(self, context, id):
+        try:
+            return self._get_by_id(context, models_v2.Port, id)
+        except exc.NoResultFound:
+            raise fc_ext.FlowClassifierPortNotFound(id=id)
+
+    @log_helpers.log_method_call
+    def update_flow_classifier(self, context, id, flow_classifier):
+        new_fc = flow_classifier['flow_classifier']
+        with context.session.begin(subtransactions=True):
+            old_fc = self._get_flow_classifier(context, id)
+            old_fc.update(new_fc)
+        return self._make_flow_classifier_dict(old_fc)
+
+    @log_helpers.log_method_call
+    def delete_flow_classifier(self, context, id):
+        try:
+            with context.session.begin(subtransactions=True):
+                fc = self._get_flow_classifier(context, id)
+                context.session.delete(fc)
+        except AssertionError:
+            raise fc_ext.FlowClassifierInUse(id=id)
+        except fc_ext.FlowClassifierNotFound:
+            LOG.info(_LI("Deleting a non-existing flow classifier."))
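For reference, create_flow_classifier() above takes the usual neutron-style request body: a dict wrapped under the 'flow_classifier' key whose fields map one-to-one onto the sfc_flow_classifiers columns, with tenant_id derived from the request context. The sketch below is a minimal, self-contained illustration with hypothetical values; it restates the _check_port_range_valid() rule standalone rather than exercising the mixin itself: a port range must satisfy min <= max, and ports are only accepted for TCP or UDP.

PROTO_NAME_TCP = 'tcp'
PROTO_NAME_UDP = 'udp'


def check_port_range_valid(port_range_min, port_range_max, protocol):
    # Mirrors FlowClassifierDbPlugin._check_port_range_valid().
    if (port_range_min is not None and port_range_max is not None and
            port_range_min > port_range_max):
        raise ValueError('invalid port range')
    if port_range_min is not None or port_range_max is not None:
        if protocol not in (PROTO_NAME_TCP, PROTO_NAME_UDP):
            raise ValueError('protocol required when ports are set')


request = {
    'flow_classifier': {
        'name': 'web-traffic',          # hypothetical example values
        'description': '',
        'ethertype': 'IPv4',
        'protocol': 'tcp',
        'source_port_range_min': None,
        'source_port_range_max': None,
        'destination_port_range_min': 80,
        'destination_port_range_max': 80,
        'source_ip_prefix': '10.0.0.0/24',
        'destination_ip_prefix': None,
        'logical_source_port': None,
        'logical_destination_port': None,
        'l7_parameters': {},
    }
}

fc = request['flow_classifier']
check_port_range_valid(fc['destination_port_range_min'],
                       fc['destination_port_range_max'],
                       fc['protocol'])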
diff --git a/networking_sfc/db/migration/README b/networking_sfc/db/migration/README
new file mode 100644
index 0000000..20c6fb9
--- /dev/null
+++ b/networking_sfc/db/migration/README
@@ -0,0 +1,3 @@
+For details refer to:
+http://docs.openstack.org/developer/networking-sfc/alembic_migration.html
+
diff --git a/networking_sfc/db/migration/__init__.py b/networking_sfc/db/migration/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/networking_sfc/db/migration/__init__.py
diff --git a/networking_sfc/db/migration/alembic_migrations/__init__.py b/networking_sfc/db/migration/alembic_migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/__init__.py
diff --git a/networking_sfc/db/migration/alembic_migrations/env.py b/networking_sfc/db/migration/alembic_migrations/env.py
new file mode 100644
index 0000000..e2f858a
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/env.py
@@ -0,0 +1,88 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from logging import config as logging_config
+
+from alembic import context
+from neutron.db import model_base
+from oslo_config import cfg
+from oslo_db.sqlalchemy import session
+import sqlalchemy as sa
+from sqlalchemy import event
+
+from networking_sfc.db.migration.models import head  # noqa
+
+
+MYSQL_ENGINE = None
+SFC_VERSION_TABLE = 'alembic_version_sfc'
+config = context.config
+neutron_config = config.neutron_config
+logging_config.fileConfig(config.config_file_name)
+target_metadata = model_base.BASEV2.metadata
+
+
+def set_mysql_engine():
+    try:
+        mysql_engine = neutron_config.command.mysql_engine
+    except cfg.NoSuchOptError:
+        mysql_engine = None
+
+    global MYSQL_ENGINE
+    MYSQL_ENGINE = (mysql_engine or
+                    model_base.BASEV2.__table_args__['mysql_engine'])
+
+
+def run_migrations_offline():
+    set_mysql_engine()
+
+    kwargs = dict()
+    if neutron_config.database.connection:
+        kwargs['url'] = neutron_config.database.connection
+    else:
+        kwargs['dialect_name'] = neutron_config.database.engine
+    kwargs['version_table'] = SFC_VERSION_TABLE
+    context.configure(**kwargs)
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+@event.listens_for(sa.Table, 'after_parent_attach')
+def set_storage_engine(target, parent):
+    if MYSQL_ENGINE:
+        target.kwargs['mysql_engine'] = MYSQL_ENGINE
+
+
+def run_migrations_online():
+    set_mysql_engine()
+    engine = session.create_engine(neutron_config.database.connection)
+
+    connection = engine.connect()
+    context.configure(
+        connection=connection,
+        target_metadata=target_metadata,
+        version_table=SFC_VERSION_TABLE
+    )
+    try:
+        with context.begin_transaction():
+            context.run_migrations()
+    finally:
+        connection.close()
+        engine.dispose()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
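Two details of env.py are worth calling out. First, the after_parent_attach listener is what forces a MySQL storage engine onto every table a migration creates: it fires as soon as a Table object is attached to its MetaData, before any DDL is emitted. A minimal, self-contained sketch of the same mechanism (plain SQLAlchemy, hypothetical table name):

import sqlalchemy as sa
from sqlalchemy import event

MYSQL_ENGINE = 'InnoDB'


@event.listens_for(sa.Table, 'after_parent_attach')
def set_storage_engine(target, parent):
    # Runs when a Table is attached to a MetaData collection.
    if MYSQL_ENGINE:
        target.kwargs['mysql_engine'] = MYSQL_ENGINE


metadata = sa.MetaData()
example = sa.Table('example', metadata,
                   sa.Column('id', sa.Integer, primary_key=True))
assert example.kwargs['mysql_engine'] == 'InnoDB'

Second, the dedicated version table (alembic_version_sfc) keeps networking-sfc's migration state out of neutron's own alembic_version table, so the subproject can be stamped and upgraded independently against the same database.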
diff --git a/networking_sfc/db/migration/alembic_migrations/script.py.mako b/networking_sfc/db/migration/alembic_migrations/script.py.mako
new file mode 100644
index 0000000..5f14159
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/script.py.mako
@@ -0,0 +1,36 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision}
+Create Date: ${create_date}
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+% if branch_labels:
+branch_labels = ${repr(branch_labels)}
+% endif
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
diff --git a/networking_sfc/db/migration/alembic_migrations/versions/HEADS b/networking_sfc/db/migration/alembic_migrations/versions/HEADS
new file mode 100644
index 0000000..152ab9e
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/versions/HEADS
@@ -0,0 +1,2 @@
+48072cb59133
+5a475fc853e6
diff --git a/networking_sfc/db/migration/alembic_migrations/versions/liberty/contract/48072cb59133_initial.py b/networking_sfc/db/migration/alembic_migrations/versions/liberty/contract/48072cb59133_initial.py
new file mode 100644
index 0000000..87b53ce
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/versions/liberty/contract/48072cb59133_initial.py
@@ -0,0 +1,33 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Initial Mitaka no-op script.
+
+Revision ID: 48072cb59133
+Revises: start_networking_sfc
+Create Date: 2015-07-28 22:18:13.330846
+
+"""
+
+from neutron.db.migration import cli
+
+
+# revision identifiers, used by Alembic.
+revision = '48072cb59133'
+down_revision = 'start_networking_sfc'
+branch_labels = (cli.CONTRACT_BRANCH,)
+
+
+def upgrade():
+    pass
diff --git a/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/24fc7241aa5_initial.py b/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/24fc7241aa5_initial.py
new file mode 100644
index 0000000..f23c9b3
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/24fc7241aa5_initial.py
@@ -0,0 +1,33 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Initial Mitaka no-op script.
+
+Revision ID: 24fc7241aa5
+Revises: start_networking_sfc
+Create Date: 2015-09-11 11:37:19.349951
+
+"""
+
+from neutron.db.migration import cli
+
+
+# revision identifiers, used by Alembic.
+revision = '24fc7241aa5'
+down_revision = 'start_networking_sfc'
+branch_labels = (cli.EXPAND_BRANCH,)
+
+
+def upgrade():
+    pass
diff --git a/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/5a475fc853e6_ovs_data_model.py b/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/5a475fc853e6_ovs_data_model.py
new file mode 100644
index 0000000..e257548
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/5a475fc853e6_ovs_data_model.py
@@ -0,0 +1,87 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Defining OVS data-model
+
+Revision ID: 5a475fc853e6
+Revises: c3e178d4a985
+Create Date: 2015-09-30 18:00:57.758762
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '5a475fc853e6'
+down_revision = 'c3e178d4a985'
+
+
+def upgrade():
+    op.create_table('sfc_portpair_details',
+        sa.Column('tenant_id', sa.String(length=255), nullable=True),
+        sa.Column('id', sa.String(length=36), nullable=False),
+        sa.Column('ingress', sa.String(length=36), nullable=True),
+        sa.Column('egress', sa.String(length=36), nullable=True),
+        sa.Column('host_id', sa.String(length=255), nullable=False),
+        sa.Column('mac_address', sa.String(length=32), nullable=False),
+        sa.Column('network_type', sa.String(length=8), nullable=True),
+        sa.Column('segment_id', sa.Integer(), nullable=True),
+        sa.Column('local_endpoint', sa.String(length=64), nullable=False),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(
+        op.f('ix_sfc_portpair_details_tenant_id'),
+        'sfc_portpair_details', ['tenant_id'], unique=False
+    )
+    op.create_table('sfc_uuid_intid_associations',
+        sa.Column('id', sa.String(length=36), nullable=False),
+        sa.Column('uuid', sa.String(length=36), nullable=False),
+        sa.Column('intid', sa.Integer(), nullable=False),
+        sa.Column('type_', sa.String(length=32), nullable=False),
+        sa.PrimaryKeyConstraint('id', 'uuid'),
+        sa.UniqueConstraint('intid')
+    )
+    op.create_table('sfc_path_nodes',
+        sa.Column('tenant_id', sa.String(length=255), nullable=True),
+        sa.Column('id', sa.String(length=36), nullable=False),
+        sa.Column('nsp', sa.Integer(), nullable=False),
+        sa.Column('nsi', sa.Integer(), nullable=False),
+        sa.Column('node_type', sa.String(length=32), nullable=True),
+        sa.Column('portchain_id', sa.String(length=255), nullable=True),
+        sa.Column('status', sa.String(length=32), nullable=True),
+        sa.Column('next_group_id', sa.Integer(), nullable=True),
+        sa.Column('next_hop', sa.String(length=512), nullable=True),
+        sa.ForeignKeyConstraint(['portchain_id'], ['sfc_port_chains.id'],
+                                ondelete='CASCADE'),
+        sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(
+        op.f('ix_sfc_path_nodes_tenant_id'),
+        'sfc_path_nodes', ['tenant_id'], unique=False
+    )
+    op.create_table('sfc_path_port_associations',
+        sa.Column('pathnode_id', sa.String(length=36), nullable=False),
+        sa.Column('portpair_id', sa.String(length=36), nullable=False),
+        sa.Column('weight', sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(['pathnode_id'], ['sfc_path_nodes.id'],
+                                ondelete='CASCADE'),
+        sa.ForeignKeyConstraint(['portpair_id'], ['sfc_portpair_details.id'],
+                                ondelete='CASCADE'),
+        sa.PrimaryKeyConstraint('pathnode_id', 'portpair_id')
+    )
diff --git a/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/9768e6a66c9_flowclassifier_data_model.py b/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/9768e6a66c9_flowclassifier_data_model.py
new file mode 100644
index 0000000..e43e9c1
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/9768e6a66c9_flowclassifier_data_model.py
@@ -0,0 +1,61 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Defining flow-classifier data-model
+
+Revision ID: 9768e6a66c9
+Revises: 24fc7241aa5
+Create Date: 2015-09-30 17:54:35.852573
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+from neutron.api.v2 import attributes as attr
+
+# revision identifiers, used by Alembic.
+revision = '9768e6a66c9'
+down_revision = '24fc7241aa5'
+
+
+def upgrade():
+    op.create_table(
+        'sfc_flow_classifiers',
+        sa.Column('tenant_id', sa.String(length=attr.TENANT_ID_MAX_LEN),
+                  nullable=True, index=True),
+        sa.Column('id', sa.String(length=36), nullable=False),
+        sa.Column('name', sa.String(length=attr.NAME_MAX_LEN), nullable=True),
+        sa.Column('ethertype', sa.String(length=40), nullable=True),
+        sa.Column('protocol', sa.String(length=40), nullable=True),
+        sa.Column('description', sa.String(length=attr.DESCRIPTION_MAX_LEN),
+                  nullable=True),
+        sa.Column('source_port_range_min', sa.Integer(), nullable=True),
+        sa.Column('source_port_range_max', sa.Integer(), nullable=True),
+        sa.Column('destination_port_range_min', sa.Integer(), nullable=True),
+        sa.Column('destination_port_range_max', sa.Integer(), nullable=True),
+        sa.Column('source_ip_prefix', sa.String(length=255), nullable=True),
+        sa.Column('destination_ip_prefix', sa.String(length=255),
+                  nullable=True),
+        sa.PrimaryKeyConstraint('id')
+    )
+
+    op.create_table(
+        'sfc_flow_classifier_l7_parameters',
+        sa.Column('keyword', sa.String(length=255), nullable=False),
+        sa.Column('value', sa.String(length=255), nullable=True),
+        sa.Column('classifier_id', sa.String(length=36), nullable=False),
+        sa.ForeignKeyConstraint(['classifier_id'],
+                                ['sfc_flow_classifiers.id'], ),
+        sa.PrimaryKeyConstraint('keyword', 'classifier_id')
+    )
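The sfc_flow_classifier_l7_parameters table created above stores one row per keyword/value pair, keyed by (keyword, classifier_id); the L7Parameter model in flowclassifier_db.py then exposes those rows as a Python dict via attribute_mapped_collection. A minimal, self-contained sketch of that pattern (in-memory SQLite and a generic declarative base instead of neutron's BASEV2; names are hypothetical):

import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.collections import attribute_mapped_collection

Base = declarative_base()


class Classifier(Base):
    __tablename__ = 'classifiers'
    id = sa.Column(sa.String(36), primary_key=True)
    # Child rows appear as a dict keyed by their 'keyword' attribute.
    l7_parameters = orm.relationship(
        'Param',
        collection_class=attribute_mapped_collection('keyword'),
        cascade='all, delete-orphan')


class Param(Base):
    __tablename__ = 'params'
    keyword = sa.Column(sa.String(255), primary_key=True)
    value = sa.Column(sa.String(255))
    classifier_id = sa.Column(
        sa.String(36), sa.ForeignKey('classifiers.id'), primary_key=True)


engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)
session = orm.Session(bind=engine)

fc = Classifier(id='fc-1')
fc.l7_parameters['url_path'] = Param(keyword='url_path', value='/api')
session.add(fc)
session.commit()
print(fc.l7_parameters['url_path'].value)  # -> /api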
diff --git a/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/c3e178d4a985_sfc_data_model.py b/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/c3e178d4a985_sfc_data_model.py
new file mode 100644
index 0000000..9f5362a
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/versions/liberty/expand/c3e178d4a985_sfc_data_model.py
@@ -0,0 +1,119 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Defining Port Chain data-model.
+
+Revision ID: c3e178d4a985
+Revises: 9768e6a66c9
+Create Date: 2015-09-11 11:37:19.349951
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+from neutron.api.v2 import attributes as attr
+
+# revision identifiers, used by Alembic.
+revision = 'c3e178d4a985'
+down_revision = '9768e6a66c9'
+
+
+def upgrade():
+    op.create_table(
+        'sfc_port_pair_groups',
+        sa.Column('id', sa.String(length=36), nullable=False),
+        sa.Column('tenant_id', sa.String(length=attr.TENANT_ID_MAX_LEN),
+                  nullable=True, index=True),
+        sa.Column('name', sa.String(length=attr.NAME_MAX_LEN),
+                  nullable=True),
+        sa.Column('description', sa.String(length=attr.DESCRIPTION_MAX_LEN),
+                  nullable=True),
+        sa.PrimaryKeyConstraint('id')
+    )
+
+    op.create_table(
+        'sfc_port_pairs',
+        sa.Column('tenant_id', sa.String(length=attr.TENANT_ID_MAX_LEN),
+                  nullable=True, index=True),
+        sa.Column('id', sa.String(length=36), nullable=False),
+        sa.Column('name', sa.String(length=attr.NAME_MAX_LEN), nullable=True),
+        sa.Column('description', sa.String(length=attr.DESCRIPTION_MAX_LEN),
+                  nullable=True),
+        sa.Column('ingress', sa.String(length=36), nullable=False),
+        sa.Column('egress', sa.String(length=36), nullable=False),
+        sa.Column('portpairgroup_id', sa.String(length=36), nullable=True),
+        sa.ForeignKeyConstraint(['egress'], ['ports.id'],
+                                ondelete='RESTRICT'),
+        sa.ForeignKeyConstraint(['ingress'], ['ports.id'],
+                                ondelete='RESTRICT'),
+        sa.ForeignKeyConstraint(['portpairgroup_id'],
+                                ['sfc_port_pair_groups.id'],
+                                ondelete='RESTRICT'),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('ingress', 'egress',
+                            name='uniq_sfc_port_pairs0ingress0egress')
+    )
+
+    op.create_table(
+        'sfc_port_chains',
+        sa.Column('tenant_id', sa.String(length=attr.TENANT_ID_MAX_LEN),
+                  nullable=True, index=True),
+        sa.Column('id', sa.String(length=36), nullable=False),
+        sa.Column('name', sa.String(length=attr.NAME_MAX_LEN),
+                  nullable=True),
+        sa.Column('description', sa.String(length=attr.DESCRIPTION_MAX_LEN),
+                  nullable=True),
+        sa.PrimaryKeyConstraint('id')
+    )
+
+    op.create_table(
+        'sfc_chain_group_associations',
+        sa.Column('portpairgroup_id', sa.String(length=36), nullable=False),
+        sa.Column('portchain_id', sa.String(length=36), nullable=False),
+        sa.Column('position', sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(['portchain_id'], ['sfc_port_chains.id'], ),
+        sa.ForeignKeyConstraint(['portpairgroup_id'],
+                                ['sfc_port_pair_groups.id'],
+                                ondelete='RESTRICT'),
+        sa.PrimaryKeyConstraint('portpairgroup_id', 'portchain_id')
+    )
+
+    op.create_table(
+        'sfc_port_chain_parameters',
+        sa.Column('keyword', sa.String(length=255), nullable=False),
+        sa.Column('value', sa.String(length=255), nullable=True),
+        sa.Column('chain_id', sa.String(length=36), nullable=False),
+        sa.ForeignKeyConstraint(['chain_id'], ['sfc_port_chains.id'], ),
+        sa.PrimaryKeyConstraint('keyword', 'chain_id')
+    )
+
+    op.create_table(
+        'sfc_service_function_params',
+        sa.Column('keyword', sa.String(length=255), nullable=False),
+        sa.Column('value', sa.String(length=255), nullable=True),
+        sa.Column('pair_id', sa.String(length=36), nullable=False),
+        sa.ForeignKeyConstraint(['pair_id'], ['sfc_port_pairs.id'], ),
+        sa.PrimaryKeyConstraint('keyword', 'pair_id')
+    )
+
+    op.create_table(
+        'sfc_chain_classifier_associations',
+        sa.Column('flowclassifier_id', sa.String(length=36), nullable=False),
+        sa.Column('portchain_id', sa.String(length=36), nullable=False),
+        sa.ForeignKeyConstraint(['flowclassifier_id'],
+                                ['sfc_flow_classifiers.id'],
+                                ondelete='RESTRICT'),
+        sa.ForeignKeyConstraint(['portchain_id'], ['sfc_port_chains.id'], ),
+        sa.PrimaryKeyConstraint('flowclassifier_id', 'portchain_id'),
+        sa.UniqueConstraint('flowclassifier_id')
+    )
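Note the position column on sfc_chain_group_associations: a port chain is an ordered list of port pair groups, and sfc_db.py (below) pairs this column with sqlalchemy.ext.orderinglist so that list order on the Python side is written back as integer positions automatically. A minimal, self-contained sketch (in-memory SQLite, generic declarative base, hypothetical IDs):

import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.orderinglist import ordering_list

Base = declarative_base()


class Chain(Base):
    __tablename__ = 'chains'
    id = sa.Column(sa.String(36), primary_key=True)
    group_associations = orm.relationship(
        'GroupAssoc',
        order_by='GroupAssoc.position',
        collection_class=ordering_list('position'),
        cascade='all, delete-orphan')


class GroupAssoc(Base):
    __tablename__ = 'chain_group_associations'
    chain_id = sa.Column(sa.String(36), sa.ForeignKey('chains.id'),
                         primary_key=True)
    group_id = sa.Column(sa.String(36), primary_key=True)
    position = sa.Column(sa.Integer)


engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)
session = orm.Session(bind=engine)

chain = Chain(id='pc-1')
chain.group_associations.append(GroupAssoc(group_id='pg-a'))
chain.group_associations.append(GroupAssoc(group_id='pg-b'))
session.add(chain)
session.commit()
print([(a.group_id, a.position) for a in chain.group_associations])
# -> [('pg-a', 0), ('pg-b', 1)]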
diff --git a/networking_sfc/db/migration/alembic_migrations/versions/start_networking_sfc.py b/networking_sfc/db/migration/alembic_migrations/versions/start_networking_sfc.py
new file mode 100644
index 0000000..4810d1e
--- /dev/null
+++ b/networking_sfc/db/migration/alembic_migrations/versions/start_networking_sfc.py
@@ -0,0 +1,30 @@
+# Copyright 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+"""start networking-sfc chain
+
+Revision ID: start_networking_sfc
+Revises: None
+Create Date: 2015-09-10 18:42:08.262632
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'start_networking_sfc'
+down_revision = None
+
+
+def upgrade():
+    pass
diff --git a/networking_sfc/db/migration/models/__init__.py b/networking_sfc/db/migration/models/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/networking_sfc/db/migration/models/__init__.py
diff --git a/networking_sfc/db/migration/models/head.py b/networking_sfc/db/migration/models/head.py
new file mode 100644
index 0000000..1345cd6
--- /dev/null
+++ b/networking_sfc/db/migration/models/head.py
@@ -0,0 +1,23 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from neutron.db import model_base
+
+from networking_sfc.db import flowclassifier_db  # noqa
+from networking_sfc.db import sfc_db  # noqa
+from networking_sfc.services.sfc.drivers.ovs import db as ovs_db  # noqa
+
+
+def get_metadata():
+    return model_base.BASEV2.metadata
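head.py exists for alembic autogenerate: importing it pulls in every networking-sfc model module for its import side effects, so the shared BASEV2 metadata that env.py compares against the database contains all of the sfc_* tables. A minimal sketch, assuming networking-sfc and its neutron dependency are importable:

from networking_sfc.db.migration.models import head

metadata = head.get_metadata()
print(sorted(t for t in metadata.tables if t.startswith('sfc_')))
# expected to include 'sfc_flow_classifiers', 'sfc_port_chains',
# 'sfc_port_pair_groups', 'sfc_port_pairs', ...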
diff --git a/networking_sfc/db/sfc_db.py b/networking_sfc/db/sfc_db.py
new file mode 100644
index 0000000..6dcd349
--- /dev/null
+++ b/networking_sfc/db/sfc_db.py
@@ -0,0 +1,553 @@
+# Copyright 2015 Futurewei. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+
+from oslo_log import helpers as log_helpers
+from oslo_log import log as logging
+from oslo_utils import uuidutils
+
+import sqlalchemy as sa
+from sqlalchemy.ext.orderinglist import ordering_list
+from sqlalchemy import orm
+from sqlalchemy.orm.collections import attribute_mapped_collection
+from sqlalchemy.orm import exc
+
+from neutron.api.v2.attributes import DESCRIPTION_MAX_LEN
+from neutron.api.v2.attributes import NAME_MAX_LEN
+from neutron.db import common_db_mixin
+from neutron.db import model_base
+from neutron.db import models_v2
+from neutron.i18n import _LI
+
+from networking_sfc.db import flowclassifier_db as fc_db
+from networking_sfc.extensions import flowclassifier as ext_fc
+from networking_sfc.extensions import sfc as ext_sfc
+
+
+LOG = logging.getLogger(__name__)
+
+UUID_LEN = 36
+PARAM_LEN = 255
+
+
+class ChainParameter(model_base.BASEV2):
+    """Represents a single chain parameter."""
+    __tablename__ = 'sfc_port_chain_parameters'
+    keyword = sa.Column(sa.String(PARAM_LEN), primary_key=True)
+    value = sa.Column(sa.String(PARAM_LEN))
+    chain_id = sa.Column(
+        sa.String(UUID_LEN),
+        sa.ForeignKey('sfc_port_chains.id', ondelete='CASCADE'),
+        primary_key=True)
+
+
+class ServiceFunctionParam(model_base.BASEV2):
+    """Represents a service function parameter."""
+    __tablename__ = 'sfc_service_function_params'
+    keyword = sa.Column(sa.String(PARAM_LEN), primary_key=True)
+    value = sa.Column(sa.String(PARAM_LEN))
+    pair_id = sa.Column(
+        sa.String(UUID_LEN),
+        sa.ForeignKey('sfc_port_pairs.id', ondelete='CASCADE'),
+        primary_key=True)
+
+
+class ChainClassifierAssoc(model_base.BASEV2):
+    """Relation table between sfc_port_chains and flow_classifiers."""
+    __tablename__ = 'sfc_chain_classifier_associations'
+    flowclassifier_id = sa.Column(
+        sa.String(UUID_LEN),
+        sa.ForeignKey('sfc_flow_classifiers.id', ondelete='RESTRICT'),
+        primary_key=True, nullable=False, unique=True)
+    portchain_id = sa.Column(
+        sa.String(UUID_LEN),
+        sa.ForeignKey('sfc_port_chains.id', ondelete='CASCADE'),
+        primary_key=True)
+    flow_classifier = orm.relationship(
+        fc_db.FlowClassifier,
+        backref='chain_classifier_associations'
+    )
+
+
+class PortPair(model_base.BASEV2, models_v2.HasId,
+               models_v2.HasTenant):
+    """Represents the ingress and egress ports for a single service function.
+
+    """
+    __tablename__ = 'sfc_port_pairs'
+    name = sa.Column(sa.String(NAME_MAX_LEN))
+    description = sa.Column(sa.String(DESCRIPTION_MAX_LEN))
+    ingress = sa.Column(
+        sa.String(UUID_LEN),
+        sa.ForeignKey('ports.id', ondelete='RESTRICT'),
+        nullable=False)
+    egress = sa.Column(
+        sa.String(UUID_LEN),
+        sa.ForeignKey('ports.id', ondelete='RESTRICT'),
+        nullable=False)
+
+    portpairgroup_id = sa.Column(
+        sa.String(UUID_LEN),
+        sa.ForeignKey('sfc_port_pair_groups.id', ondelete='RESTRICT'))
+    service_function_parameters = orm.relationship(
+        ServiceFunctionParam,
+        collection_class=attribute_mapped_collection('keyword'),
+        cascade='all, delete-orphan')
+
+    __table_args__ = (
+        sa.UniqueConstraint(
+            ingress, egress,
+            name='uniq_sfc_port_pairs0ingress0egress'
+        ),
+        model_base.BASEV2.__table_args__
+    )
+
+
+class ChainGroupAssoc(model_base.BASEV2):
+    """Relation table between sfc_port_chains and sfc_port_pair_groups."""
+    __tablename__ = 'sfc_chain_group_associations'
+    portpairgroup_id = sa.Column(
+        sa.String(UUID_LEN),
+        sa.ForeignKey('sfc_port_pair_groups.id', ondelete='RESTRICT'),
+        primary_key=True, nullable=False)
+    portchain_id = sa.Column(
+        sa.String(UUID_LEN),
+        sa.ForeignKey('sfc_port_chains.id', ondelete='CASCADE'),
+        primary_key=True)
+    position = sa.Column(sa.Integer)
+
+
+class PortPairGroup(model_base.BASEV2, models_v2.HasId,
+                    models_v2.HasTenant):
+    """Represents a port pair group model."""
+    __tablename__ = 'sfc_port_pair_groups'
+    name = sa.Column(sa.String(NAME_MAX_LEN))
+    description = sa.Column(sa.String(DESCRIPTION_MAX_LEN))
+    port_pairs = orm.relationship(
+        PortPair,
+        backref='port_pair_group'
+    )
+    chain_group_associations = orm.relationship(
+        ChainGroupAssoc,
+        backref='port_pair_groups')
+
+
+class PortChain(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
+    """Represents a Neutron service function Port Chain."""
+    __tablename__ = 'sfc_port_chains'
+
+    name = sa.Column(sa.String(NAME_MAX_LEN))
+    description = sa.Column(sa.String(DESCRIPTION_MAX_LEN))
+    chain_group_associations = orm.relationship(
+        ChainGroupAssoc,
+        backref='port_chain',
+        order_by="ChainGroupAssoc.position",
+        collection_class=ordering_list('position'),
+        cascade='all, delete-orphan')
+    chain_classifier_associations = orm.relationship(
+        ChainClassifierAssoc,
+        backref='port_chain',
+        cascade='all, delete-orphan')
+    chain_parameters = orm.relationship(
+        ChainParameter,
+        collection_class=attribute_mapped_collection('keyword'),
+        cascade='all, delete-orphan')
+
+
+class SfcDbPlugin(
+    ext_sfc.SfcPluginBase,
+    common_db_mixin.CommonDbMixin
+):
+    """Mixin class to add port chain to db_plugin_base_v2."""
+
+    def _make_port_chain_dict(self, port_chain, fields=None):
+        res = {
+            'id': port_chain['id'],
+            'name': port_chain['name'],
+            'tenant_id': port_chain['tenant_id'],
+            'description': port_chain['description'],
+            'port_pair_groups': [
+                assoc['portpairgroup_id']
+                for assoc in port_chain['chain_group_associations']
+            ],
+            'flow_classifiers': [
+                assoc['flowclassifier_id']
+                for assoc in port_chain['chain_classifier_associations']
+            ],
+            'chain_parameters': {
+                param['keyword']: param['value']
+                for k, param in six.iteritems(port_chain['chain_parameters'])
+            }
+        }
+        return self._fields(res, fields)
+
+    def _validate_port_pair_groups(self, context, pg_ids):
+        with context.session.begin(subtransactions=True):
+            query = self._model_query(context, PortChain)
+            for port_chain_db in query.all():
+                pc_pg_ids = [
+                    assoc['portpairgroup_id']
+                    for assoc in port_chain_db.chain_group_associations
+                ]
+                if pc_pg_ids == pg_ids:
+                    raise ext_sfc.InvalidPortPairGroups(
+                        port_pair_groups=pg_ids, port_chain=port_chain_db.id)
+
+    def _validate_flow_classifiers(self, context, fc_ids):
+        # TODO(xiaodong): Validate flow classifiers if needed in future.
+        pass
+
+    def _setup_chain_group_associations(
+        self, context, port_chain, pg_ids
+    ):
+        with context.session.begin(subtransactions=True):
+            chain_group_associations = []
+            for pg_id in pg_ids:
+                query = self._model_query(context, ChainGroupAssoc)
+                chain_group_association = query.filter_by(
+                    portchain_id=port_chain.id, portpairgroup_id=pg_id
+                ).first()
+                if not chain_group_association:
+                    chain_group_association = ChainGroupAssoc(
+                        portpairgroup_id=pg_id
+                    )
+                chain_group_associations.append(chain_group_association)
+            port_chain.chain_group_associations = chain_group_associations
+
+    def _setup_chain_classifier_associations(
+        self, context, port_chain, fc_ids
+    ):
+        with context.session.begin(subtransactions=True):
+            chain_classifier_associations = []
+            for fc_id in fc_ids:
+                query = self._model_query(context, ChainClassifierAssoc)
+                chain_classifier_association = query.filter_by(
+                    portchain_id=port_chain.id, flowclassifier_id=fc_id
+                ).first()
+                if not chain_classifier_association:
+                    chain_classifier_association = ChainClassifierAssoc(
+                        flowclassifier_id=fc_id
+                    )
+                chain_classifier_associations.append(
+                    chain_classifier_association)
+            port_chain.chain_classifier_associations = (
+                chain_classifier_associations)
+
+    @log_helpers.log_method_call
+    def create_port_chain(self, context, port_chain):
+        """Create a port chain."""
+        pc = port_chain['port_chain']
+        tenant_id = self._get_tenant_id_for_create(context, pc)
+        with context.session.begin(subtransactions=True):
+            chain_parameters = {
+                key: ChainParameter(keyword=key, value=val)
+                for key, val in six.iteritems(pc['chain_parameters'])}
+
+            pg_ids = pc['port_pair_groups']
+            for pg_id in pg_ids:
+                self._get_port_pair_group(context, pg_id)
+            fc_ids = pc['flow_classifiers']
+            fcs = [
+                self._get_flow_classifier(context, fc_id)
+                for fc_id in fc_ids
+            ]
+            for fc in fcs:
+                if fc.chain_classifier_associations:
+                    raise ext_fc.FlowClassifierInUse(id=fc.id)
+
+            self._validate_port_pair_groups(context, pg_ids)
+            self._validate_flow_classifiers(context, fc_ids)
+            port_chain_db = PortChain(id=uuidutils.generate_uuid(),
+                                      tenant_id=tenant_id,
+                                      description=pc['description'],
+                                      name=pc['name'],
+                                      chain_parameters=chain_parameters)
+            self._setup_chain_group_associations(
+                context, port_chain_db, pg_ids)
+            self._setup_chain_classifier_associations(
+                context, port_chain_db, fc_ids)
+            context.session.add(port_chain_db)
+
+        return self._make_port_chain_dict(port_chain_db)
+
+    @log_helpers.log_method_call
+    def get_port_chains(self, context, filters=None, fields=None,
+                        sorts=None, limit=None,
+                        marker=None, page_reverse=False, default_sg=False):
+
+        marker_obj = self._get_marker_obj(context, 'port_chain', limit,
+                                          marker)
+        return self._get_collection(context,
+                                    PortChain,
+                                    self._make_port_chain_dict,
+                                    filters=filters, fields=fields,
+                                    sorts=sorts,
+                                    limit=limit, marker_obj=marker_obj,
+                                    page_reverse=page_reverse)
+
+    def get_port_chains_count(self, context, filters=None):
+        return self._get_collection_count(context, PortChain,
+                                          filters=filters)
+
+    @log_helpers.log_method_call
+    def get_port_chain(self, context, id, fields=None):
+        portchain = self._get_port_chain(context, id)
+        return self._make_port_chain_dict(portchain, fields)
+
+    @log_helpers.log_method_call
+    def _get_port_chain(self, context, id):
+        try:
+            return self._get_by_id(context, PortChain, id)
+        except exc.NoResultFound:
+            raise ext_sfc.PortChainNotFound(id=id)
+
+    @log_helpers.log_method_call
+    def delete_port_chain(self, context, id):
+        try:
+            with context.session.begin(subtransactions=True):
+                pc = self._get_port_chain(context, id)
+                context.session.delete(pc)
+        except ext_sfc.PortChainNotFound:
+            LOG.info(_LI("Deleting a non-existing port chain."))
+
+    @log_helpers.log_method_call
+    def update_port_chain(self, context, id, port_chain):
+        p = port_chain['port_chain']
+        with context.session.begin(subtransactions=True):
+            pc = self._get_port_chain(context, id)
+            for k, v in six.iteritems(p):
+                if k == 'flow_classifiers':
+                    for fc_id in v:
+                        self._get_flow_classifier(context, fc_id)
+                    self._setup_chain_classifier_associations(context, pc, v)
+                else:
+                    pc[k] = v
+        return self._make_port_chain_dict(pc)
+
+    def _make_port_pair_dict(self, port_pair, fields=None):
+        res = {
+            'id': port_pair['id'],
+            'name': port_pair['name'],
+            'description': port_pair['description'],
+            'tenant_id': port_pair['tenant_id'],
+            'ingress': port_pair['ingress'],
+            'egress': port_pair['egress'],
+            'service_function_parameters': {
+                param['keyword']: param['value']
+                for k, param in six.iteritems(
+                    port_pair['service_function_parameters'])
+            }
+        }
+
+        return self._fields(res, fields)
+
+    def _validate_port_pair_ingress_egress(self, ingress, egress):
+        if 'device_id' not in ingress or not ingress['device_id']:
+            raise ext_sfc.PortPairIngressNoHost(
+                ingress=ingress['id']
+            )
+        if 'device_id' not in egress or not egress['device_id']:
+            raise ext_sfc.PortpairEgressNoHost(
+                egress=egress['id']
+            )
+        if ingress['device_id'] != egress['device_id']:
+            raise ext_sfc.PortPairIngressEgressDifferentHost(
+                ingress=ingress['id'],
+                egress=egress['id'])
+
+    @log_helpers.log_method_call
+    def create_port_pair(self, context, port_pair):
+        """Create a port pair."""
+        pp = port_pair['port_pair']
+        tenant_id = self._get_tenant_id_for_create(context, pp)
+        with context.session.begin(subtransactions=True):
+            service_function_parameters = {
+                key: ServiceFunctionParam(keyword=key, value=val)
+                for key, val in six.iteritems(
+                    pp['service_function_parameters']
+                )
+            }
+            ingress = self._get_port(context, pp['ingress'])
+            egress = self._get_port(context, pp['egress'])
+            self._validate_port_pair_ingress_egress(ingress, egress)
+            port_pair_db = PortPair(
+                id=uuidutils.generate_uuid(),
+                name=pp['name'],
+                description=pp['description'],
+                tenant_id=tenant_id,
+                ingress=pp['ingress'],
+                egress=pp['egress'],
+                service_function_parameters=service_function_parameters
+            )
+            context.session.add(port_pair_db)
+        return self._make_port_pair_dict(port_pair_db)
+
+    @log_helpers.log_method_call
+    def get_port_pairs(self, context, filters=None, fields=None,
+                       sorts=None, limit=None, marker=None,
+                       page_reverse=False):
+        marker_obj = self._get_marker_obj(context, 'port_pair',
+                                          limit, marker)
+        return self._get_collection(context,
+                                    PortPair,
+                                    self._make_port_pair_dict,
+                                    filters=filters, fields=fields,
+                                    sorts=sorts,
+                                    limit=limit, marker_obj=marker_obj,
+                                    page_reverse=page_reverse)
+
+    def get_port_pairs_count(self, context, filters=None):
+        return self._get_collection_count(context, PortPair,
+                                          filters=filters)
+
+    @log_helpers.log_method_call
+    def get_port_pair(self, context, id, fields=None):
+        port_pair = self._get_port_pair(context, id)
+        return self._make_port_pair_dict(port_pair, fields)
+
+    def _get_port_pair(self, context, id):
+        try:
+            return self._get_by_id(context, PortPair, id)
+        except exc.NoResultFound:
+            raise ext_sfc.PortPairNotFound(id=id)
+
+    def _get_port(self, context, id):
+        try:
+            return self._get_by_id(context, models_v2.Port, id)
+        except exc.NoResultFound:
+            raise ext_sfc.PortPairPortNotFound(id=id)
+
+    @log_helpers.log_method_call
+    def update_port_pair(self, context, id, port_pair):
+        new_pp = port_pair['port_pair']
+        with context.session.begin(subtransactions=True):
+            old_pp = self._get_port_pair(context, id)
+            old_pp.update(new_pp)
+        return self._make_port_pair_dict(old_pp)
+
+    @log_helpers.log_method_call
+    def delete_port_pair(self, context, id):
+        try:
+            with context.session.begin(subtransactions=True):
+                pp = self._get_port_pair(context, id)
+                if pp.portpairgroup_id:
+                    raise ext_sfc.PortPairInUse(id=id)
+                context.session.delete(pp)
+        except ext_sfc.PortPairNotFound:
+            LOG.info(_LI("Deleting a non-existing port pair."))
+
+    def _make_port_pair_group_dict(self, port_pair_group, fields=None):
+        res = {
+            'id': port_pair_group['id'],
+            'name': port_pair_group['name'],
+            'description': port_pair_group['description'],
+            'tenant_id': port_pair_group['tenant_id'],
+            'port_pairs': [pp['id'] for pp in port_pair_group['port_pairs']],
+        }
+
+        return self._fields(res, fields)
+
+    @log_helpers.log_method_call
+    def create_port_pair_group(self, context, port_pair_group):
+        """Create a port pair group."""
+        pg = port_pair_group['port_pair_group']
+        tenant_id = self._get_tenant_id_for_create(context, pg)
+
+        with context.session.begin(subtransactions=True):
+            portpairs_list = [self._get_port_pair(context, pp_id)
+                              for pp_id in pg['port_pairs']]
+            for portpair in portpairs_list:
+                if portpair.portpairgroup_id:
+                    raise ext_sfc.PortPairInUse(id=portpair.id)
+            port_pair_group_db = PortPairGroup(
+                id=uuidutils.generate_uuid(),
+                name=pg['name'],
+                description=pg['description'],
+                tenant_id=tenant_id,
+                port_pairs=portpairs_list)
+            context.session.add(port_pair_group_db)
+        return self._make_port_pair_group_dict(port_pair_group_db)
+
+    @log_helpers.log_method_call
+    def get_port_pair_groups(self, context, filters=None, fields=None,
+                             sorts=None, limit=None, marker=None,
+                             page_reverse=False):
+        marker_obj = self._get_marker_obj(context, 'port_pair_group',
+                                          limit, marker)
+        return self._get_collection(context,
+                                    PortPairGroup,
+                                    self._make_port_pair_group_dict,
+                                    filters=filters, fields=fields,
+                                    sorts=sorts,
+                                    limit=limit, marker_obj=marker_obj,
+                                    page_reverse=page_reverse)
+
+    def get_port_pair_groups_count(self, context, filters=None):
+        return self._get_collection_count(context, PortPairGroup,
+                                          filters=filters)
+
+    @log_helpers.log_method_call
+    def get_port_pair_group(self, context, id, fields=None):
+        port_pair_group = self._get_port_pair_group(context, id)
+        return self._make_port_pair_group_dict(port_pair_group, fields)
+
+    def _get_port_pair_group(self, context, id):
+        try:
+            return self._get_by_id(context, PortPairGroup, id)
+        except exc.NoResultFound:
+            raise ext_sfc.PortPairGroupNotFound(id=id)
+
+    def _get_flow_classifier(self, context, id):
+        try:
+            return self._get_by_id(context, fc_db.FlowClassifier, id)
+        except exc.NoResultFound:
+            raise ext_fc.FlowClassifierNotFound(id=id)
+
+    @log_helpers.log_method_call
+    def update_port_pair_group(self, context, id, port_pair_group):
+        new_pg = port_pair_group['port_pair_group']
+
+        with context.session.begin(subtransactions=True):
+            portpairs_list = [self._get_port_pair(context, pp_id)
+                              for pp_id in new_pg.get('port_pairs', [])]
+            for portpair in portpairs_list:
+                if (
+                    portpair.portpairgroup_id and
+                    portpair.portpairgroup_id != id
+                ):
+                    raise ext_sfc.PortPairInUse(id=portpair.id)
+
+            old_pg = self._get_port_pair_group(context, id)
+            for k, v in six.iteritems(new_pg):
+                if k == 'port_pairs':
+                    port_pairs = [
+                        self._get_port_pair(context, pp_id)
+                        for pp_id in v
+                    ]
+                    old_pg.port_pairs = port_pairs
+                else:
+                    old_pg[k] = v
+
+        return self._make_port_pair_group_dict(old_pg)
+
+    @log_helpers.log_method_call
+    def delete_port_pair_group(self, context, id):
+        try:
+            with context.session.begin(subtransactions=True):
+                pg = self._get_port_pair_group(context, id)
+                if pg.chain_group_associations:
+                    raise ext_sfc.PortPairGroupInUse(id=id)
+                context.session.delete(pg)
+        except ext_sfc.PortPairGroupNotFound:
+            LOG.info(_LI("Deleting a non-existing port pair group."))
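For reference, SfcDbPlugin consumes the same wrapped-dict convention as the flow classifier plugin. The sketch below shows plausible request bodies with hypothetical UUID placeholders and names; the exact attribute set and defaults are defined in networking_sfc/extensions/sfc.py, not here.

port_pair = {
    'port_pair': {
        'name': 'firewall-pp',
        'description': '',
        'ingress': '<neutron port UUID>',   # ingress/egress must be bound
        'egress': '<neutron port UUID>',    # to the same host (device_id)
        'service_function_parameters': {'correlation': None},
    }
}

port_pair_group = {
    'port_pair_group': {
        'name': 'firewall-group',
        'description': '',
        'port_pairs': ['<port pair UUID>'],   # pairs not yet in a group
    }
}

port_chain = {
    'port_chain': {
        'name': 'chain1',
        'description': '',
        'port_pair_groups': ['<port pair group UUID>'],  # ordered hops
        'flow_classifiers': ['<flow classifier UUID>'],  # each usable once
        'chain_parameters': {'correlation': 'mpls'},     # hypothetical value
    }
}

As the code above enforces: create_port_pair() requires ingress and egress to share a host, create_port_pair_group() refuses pairs already assigned to a group, and create_port_chain() refuses flow classifiers already bound to another chain and rejects a group list identical to an existing chain's.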