aboutsummaryrefslogtreecommitdiffstats
path: root/keystone-moon/keystone/contrib
diff options
context:
space:
mode:
authorWuKong <rebirthmonkey@gmail.com>2015-06-30 18:47:29 +0200
committerWuKong <rebirthmonkey@gmail.com>2015-06-30 18:47:29 +0200
commitb8c756ecdd7cced1db4300935484e8c83701c82e (patch)
tree87e51107d82b217ede145de9d9d59e2100725bd7 /keystone-moon/keystone/contrib
parentc304c773bae68fb854ed9eab8fb35c4ef17cf136 (diff)
migrate moon code from github to opnfv
Change-Id: Ice53e368fd1114d56a75271aa9f2e598e3eba604 Signed-off-by: WuKong <rebirthmonkey@gmail.com>
Diffstat (limited to 'keystone-moon/keystone/contrib')
-rw-r--r--keystone-moon/keystone/contrib/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/admin_crud/__init__.py15
-rw-r--r--keystone-moon/keystone/contrib/admin_crud/core.py241
-rw-r--r--keystone-moon/keystone/contrib/ec2/__init__.py18
-rw-r--r--keystone-moon/keystone/contrib/ec2/controllers.py415
-rw-r--r--keystone-moon/keystone/contrib/ec2/core.py34
-rw-r--r--keystone-moon/keystone/contrib/ec2/routers.py95
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/__init__.py15
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/backends/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/backends/catalog_sql.py76
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/backends/sql.py224
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/controllers.py300
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/core.py289
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/migrate.cfg25
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/001_add_endpoint_filtering_table.py47
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/002_add_endpoint_groups.py51
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/routers.py149
-rw-r--r--keystone-moon/keystone/contrib/endpoint_filter/schema.py35
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/__init__.py15
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/backends/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/backends/sql.py140
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/controllers.py166
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/core.py430
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/migrate.cfg25
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/001_add_endpoint_policy_table.py48
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/endpoint_policy/routers.py85
-rw-r--r--keystone-moon/keystone/contrib/example/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/example/configuration.rst31
-rw-r--r--keystone-moon/keystone/contrib/example/controllers.py26
-rw-r--r--keystone-moon/keystone/contrib/example/core.py92
-rw-r--r--keystone-moon/keystone/contrib/example/migrate_repo/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/example/migrate_repo/migrate.cfg25
-rw-r--r--keystone-moon/keystone/contrib/example/migrate_repo/versions/001_example_table.py43
-rw-r--r--keystone-moon/keystone/contrib/example/migrate_repo/versions/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/example/routers.py38
-rw-r--r--keystone-moon/keystone/contrib/federation/__init__.py15
-rw-r--r--keystone-moon/keystone/contrib/federation/backends/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/federation/backends/sql.py315
-rw-r--r--keystone-moon/keystone/contrib/federation/controllers.py457
-rw-r--r--keystone-moon/keystone/contrib/federation/core.py346
-rw-r--r--keystone-moon/keystone/contrib/federation/idp.py558
-rw-r--r--keystone-moon/keystone/contrib/federation/migrate_repo/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/federation/migrate_repo/migrate.cfg25
-rw-r--r--keystone-moon/keystone/contrib/federation/migrate_repo/versions/001_add_identity_provider_table.py51
-rw-r--r--keystone-moon/keystone/contrib/federation/migrate_repo/versions/002_add_mapping_tables.py37
-rw-r--r--keystone-moon/keystone/contrib/federation/migrate_repo/versions/003_mapping_id_nullable_false.py35
-rw-r--r--keystone-moon/keystone/contrib/federation/migrate_repo/versions/004_add_remote_id_column.py30
-rw-r--r--keystone-moon/keystone/contrib/federation/migrate_repo/versions/005_add_service_provider_table.py38
-rw-r--r--keystone-moon/keystone/contrib/federation/migrate_repo/versions/006_fixup_service_provider_attributes.py48
-rw-r--r--keystone-moon/keystone/contrib/federation/migrate_repo/versions/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/federation/routers.py226
-rw-r--r--keystone-moon/keystone/contrib/federation/schema.py78
-rw-r--r--keystone-moon/keystone/contrib/federation/utils.py763
-rw-r--r--keystone-moon/keystone/contrib/moon/__init__.py8
-rw-r--r--keystone-moon/keystone/contrib/moon/backends/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/moon/backends/flat.py123
-rw-r--r--keystone-moon/keystone/contrib/moon/backends/sql.py1537
-rw-r--r--keystone-moon/keystone/contrib/moon/controllers.py611
-rw-r--r--keystone-moon/keystone/contrib/moon/core.py2375
-rw-r--r--keystone-moon/keystone/contrib/moon/exception.py112
-rw-r--r--keystone-moon/keystone/contrib/moon/extension.py740
-rw-r--r--keystone-moon/keystone/contrib/moon/migrate_repo/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/moon/migrate_repo/migrate.cfg25
-rw-r--r--keystone-moon/keystone/contrib/moon/migrate_repo/versions/001_moon.py194
-rw-r--r--keystone-moon/keystone/contrib/moon/migrate_repo/versions/002_moon.py34
-rw-r--r--keystone-moon/keystone/contrib/moon/migrate_repo/versions/003_moon.py32
-rw-r--r--keystone-moon/keystone/contrib/moon/routers.py443
-rw-r--r--keystone-moon/keystone/contrib/oauth1/__init__.py15
-rw-r--r--keystone-moon/keystone/contrib/oauth1/backends/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/oauth1/backends/sql.py272
-rw-r--r--keystone-moon/keystone/contrib/oauth1/controllers.py417
-rw-r--r--keystone-moon/keystone/contrib/oauth1/core.py361
-rw-r--r--keystone-moon/keystone/contrib/oauth1/migrate_repo/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/oauth1/migrate_repo/migrate.cfg25
-rw-r--r--keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/001_add_oauth_tables.py67
-rw-r--r--keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/002_fix_oauth_tables_fk.py54
-rw-r--r--keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/003_consumer_description_nullalbe.py29
-rw-r--r--keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/004_request_token_roles_nullable.py35
-rw-r--r--keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/005_consumer_id_index.py42
-rw-r--r--keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/oauth1/routers.py154
-rw-r--r--keystone-moon/keystone/contrib/oauth1/validator.py179
-rw-r--r--keystone-moon/keystone/contrib/revoke/__init__.py13
-rw-r--r--keystone-moon/keystone/contrib/revoke/backends/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/revoke/backends/kvs.py73
-rw-r--r--keystone-moon/keystone/contrib/revoke/backends/sql.py104
-rw-r--r--keystone-moon/keystone/contrib/revoke/controllers.py44
-rw-r--r--keystone-moon/keystone/contrib/revoke/core.py250
-rw-r--r--keystone-moon/keystone/contrib/revoke/migrate_repo/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/revoke/migrate_repo/migrate.cfg25
-rw-r--r--keystone-moon/keystone/contrib/revoke/migrate_repo/versions/001_revoke_table.py47
-rw-r--r--keystone-moon/keystone/contrib/revoke/migrate_repo/versions/002_add_audit_id_and_chain_to_revoke_table.py37
-rw-r--r--keystone-moon/keystone/contrib/revoke/migrate_repo/versions/__init__.py0
-rw-r--r--keystone-moon/keystone/contrib/revoke/model.py365
-rw-r--r--keystone-moon/keystone/contrib/revoke/routers.py29
-rw-r--r--keystone-moon/keystone/contrib/s3/__init__.py15
-rw-r--r--keystone-moon/keystone/contrib/s3/core.py73
-rw-r--r--keystone-moon/keystone/contrib/simple_cert/__init__.py14
-rw-r--r--keystone-moon/keystone/contrib/simple_cert/controllers.py42
-rw-r--r--keystone-moon/keystone/contrib/simple_cert/core.py32
-rw-r--r--keystone-moon/keystone/contrib/simple_cert/routers.py41
-rw-r--r--keystone-moon/keystone/contrib/user_crud/__init__.py15
-rw-r--r--keystone-moon/keystone/contrib/user_crud/core.py134
107 files changed, 15452 insertions, 0 deletions
diff --git a/keystone-moon/keystone/contrib/__init__.py b/keystone-moon/keystone/contrib/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/__init__.py
diff --git a/keystone-moon/keystone/contrib/admin_crud/__init__.py b/keystone-moon/keystone/contrib/admin_crud/__init__.py
new file mode 100644
index 00000000..d6020920
--- /dev/null
+++ b/keystone-moon/keystone/contrib/admin_crud/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.admin_crud.core import * # noqa
diff --git a/keystone-moon/keystone/contrib/admin_crud/core.py b/keystone-moon/keystone/contrib/admin_crud/core.py
new file mode 100644
index 00000000..5d69d249
--- /dev/null
+++ b/keystone-moon/keystone/contrib/admin_crud/core.py
@@ -0,0 +1,241 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone import assignment
+from keystone import catalog
+from keystone.common import extension
+from keystone.common import wsgi
+from keystone import identity
+from keystone import resource
+
+
# Descriptor for the OS-KSADM admin extension; registering it makes the
# extension discoverable via GET /extensions on the admin API.
ADMIN_EXTENSION_DATA = {
    'name': 'OpenStack Keystone Admin',
    'namespace': 'http://docs.openstack.org/identity/api/ext/'
                 'OS-KSADM/v1.0',
    'alias': 'OS-KSADM',
    'updated': '2013-07-11T17:14:00-00:00',
    'description': 'OpenStack extensions to Keystone v2.0 API '
                   'enabling Administrative Operations.',
    'links': [
        {
            'rel': 'describedby',
            # TODO(dolph): link needs to be revised after
            # bug 928059 merges
            'type': 'text/html',
            'href': 'https://github.com/openstack/identity-api',
        }
    ]}

extension.register_admin_extension('OS-KSADM', ADMIN_EXTENSION_DATA)
+
+
class CrudExtension(wsgi.ExtensionRouter):
    """Previously known as the OS-KSADM extension.

    Provides a bunch of CRUD operations for internal data types.
    """

    def add_routes(self, mapper):
        """Connect every OS-KSADM route on *mapper*."""
        tenant_controller = resource.controllers.Tenant()
        assignment_tenant_controller = (
            assignment.controllers.TenantAssignment())
        user_controller = identity.controllers.User()
        role_controller = assignment.controllers.Role()
        assignment_role_controller = assignment.controllers.RoleAssignmentV2()
        service_controller = catalog.controllers.Service()
        endpoint_controller = catalog.controllers.Endpoint()

        # Route table: (path, controller, action, allowed HTTP methods).
        # Entries are connected in this exact order.
        routes = [
            # Tenant operations.
            ('/tenants', tenant_controller,
             'create_project', ['POST']),
            ('/tenants/{tenant_id}', tenant_controller,
             'update_project', ['PUT', 'POST']),
            ('/tenants/{tenant_id}', tenant_controller,
             'delete_project', ['DELETE']),
            ('/tenants/{tenant_id}/users', assignment_tenant_controller,
             'get_project_users', ['GET']),

            # User operations.
            ('/users', user_controller, 'get_users', ['GET']),
            ('/users', user_controller, 'create_user', ['POST']),
            # NOTE(termie): not in diablo
            ('/users/{user_id}', user_controller, 'update_user', ['PUT']),
            ('/users/{user_id}', user_controller, 'delete_user', ['DELETE']),

            # COMPAT(diablo): the routes with no OS-KSADM infix are from
            # diablo.
            ('/users/{user_id}/password', user_controller,
             'set_user_password', ['PUT']),
            ('/users/{user_id}/OS-KSADM/password', user_controller,
             'set_user_password', ['PUT']),
            ('/users/{user_id}/tenant', user_controller,
             'update_user', ['PUT']),
            ('/users/{user_id}/OS-KSADM/tenant', user_controller,
             'update_user', ['PUT']),
            ('/users/{user_id}/enabled', user_controller,
             'set_user_enabled', ['PUT']),
            ('/users/{user_id}/OS-KSADM/enabled', user_controller,
             'set_user_enabled', ['PUT']),

            # User roles.
            ('/users/{user_id}/roles/OS-KSADM/{role_id}',
             assignment_role_controller, 'add_role_to_user', ['PUT']),
            ('/users/{user_id}/roles/OS-KSADM/{role_id}',
             assignment_role_controller, 'remove_role_from_user', ['DELETE']),

            # COMPAT(diablo): user roles.
            ('/users/{user_id}/roleRefs',
             assignment_role_controller, 'get_role_refs', ['GET']),
            ('/users/{user_id}/roleRefs',
             assignment_role_controller, 'create_role_ref', ['POST']),
            ('/users/{user_id}/roleRefs/{role_ref_id}',
             assignment_role_controller, 'delete_role_ref', ['DELETE']),

            # User-tenant roles.
            ('/tenants/{tenant_id}/users/{user_id}/roles/OS-KSADM/{role_id}',
             assignment_role_controller, 'add_role_to_user', ['PUT']),
            ('/tenants/{tenant_id}/users/{user_id}/roles/OS-KSADM/{role_id}',
             assignment_role_controller, 'remove_role_from_user', ['DELETE']),

            # Service operations.
            ('/OS-KSADM/services', service_controller,
             'get_services', ['GET']),
            ('/OS-KSADM/services', service_controller,
             'create_service', ['POST']),
            ('/OS-KSADM/services/{service_id}', service_controller,
             'delete_service', ['DELETE']),
            ('/OS-KSADM/services/{service_id}', service_controller,
             'get_service', ['GET']),

            # Endpoint templates.
            ('/endpoints', endpoint_controller, 'get_endpoints', ['GET']),
            ('/endpoints', endpoint_controller, 'create_endpoint', ['POST']),
            ('/endpoints/{endpoint_id}', endpoint_controller,
             'delete_endpoint', ['DELETE']),

            # Role operations.
            ('/OS-KSADM/roles', role_controller, 'create_role', ['POST']),
            ('/OS-KSADM/roles', role_controller, 'get_roles', ['GET']),
            ('/OS-KSADM/roles/{role_id}', role_controller,
             'get_role', ['GET']),
            ('/OS-KSADM/roles/{role_id}', role_controller,
             'delete_role', ['DELETE']),
        ]

        for path, ctrl, action, methods in routes:
            mapper.connect(path,
                           controller=ctrl,
                           action=action,
                           conditions=dict(method=methods))
diff --git a/keystone-moon/keystone/contrib/ec2/__init__.py b/keystone-moon/keystone/contrib/ec2/__init__.py
new file mode 100644
index 00000000..88622e53
--- /dev/null
+++ b/keystone-moon/keystone/contrib/ec2/__init__.py
@@ -0,0 +1,18 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.ec2 import controllers # noqa
+from keystone.contrib.ec2.core import * # noqa
+from keystone.contrib.ec2.routers import Ec2Extension # noqa
+from keystone.contrib.ec2.routers import Ec2ExtensionV3 # noqa
diff --git a/keystone-moon/keystone/contrib/ec2/controllers.py b/keystone-moon/keystone/contrib/ec2/controllers.py
new file mode 100644
index 00000000..6e6d3268
--- /dev/null
+++ b/keystone-moon/keystone/contrib/ec2/controllers.py
@@ -0,0 +1,415 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Main entry point into the EC2 Credentials service.
+
+This service allows the creation of access/secret credentials used for
+the ec2 interop layer of OpenStack.
+
+A user can create as many access/secret pairs, each of which map to a
+specific project. This is required because OpenStack supports a user
+belonging to multiple projects, whereas the signatures created on ec2-style
+requests don't allow specification of which project the user wishes to act
+upon.
+
+To complete the cycle, we provide a method that OpenStack services can
+use to validate a signature and get a corresponding OpenStack token. This
+token allows method calls to other services within the context the
+access/secret was created. As an example, Nova requests Keystone to validate
+the signature of a request, receives a token, and then makes a request to
+Glance to list images needed to perform the requested task.
+
+"""
+
+import abc
+import sys
+import uuid
+
+from keystoneclient.contrib.ec2 import utils as ec2_utils
+from oslo_serialization import jsonutils
+import six
+
+from keystone.common import controller
+from keystone.common import dependency
+from keystone.common import utils
+from keystone.common import wsgi
+from keystone import exception
+from keystone.i18n import _
+from keystone.models import token_model
+
+
@dependency.requires('assignment_api', 'catalog_api', 'credential_api',
                     'identity_api', 'resource_api', 'role_api',
                     'token_provider_api')
@six.add_metaclass(abc.ABCMeta)
class Ec2ControllerCommon(object):
    """Logic shared by the V2 and V3 EC2 credential controllers."""

    def check_signature(self, creds_ref, credentials):
        """Verify that the request's EC2 signature is valid.

        :param creds_ref: stored ec2 credential dict (provides 'secret')
        :param credentials: dict of the signed request, including
            'signature' and 'host'
        :raises exception.Unauthorized: if no signature matches
        """
        signer = ec2_utils.Ec2Signer(creds_ref['secret'])
        signature = signer.generate(credentials)
        if utils.auth_str_equal(credentials['signature'], signature):
            return
        # NOTE(vish): Some libraries don't use the port when signing
        #             requests, so try again without port.
        # Bug fix: the retry condition must look for ':' in the host
        # (which may carry a ':port' suffix), not in the base64 signature,
        # and `credentials` is a dict, so the original attribute access
        # (`credentials.signature`) raised AttributeError instead of
        # comparing signatures.
        elif ':' in credentials['host']:
            hostname, _port = credentials['host'].split(':')
            credentials['host'] = hostname
            signature = signer.generate(credentials)
            if not utils.auth_str_equal(credentials['signature'], signature):
                raise exception.Unauthorized(message='Invalid EC2 signature.')
        else:
            raise exception.Unauthorized(message='EC2 signature not supplied.')

    @abc.abstractmethod
    def authenticate(self, context, credentials=None, ec2Credentials=None):
        """Validate a signed EC2 request and provide a token.

        Other services (such as Nova) use this **admin** call to determine
        if a request they signed received is from a valid user.

        If it is a valid signature, an OpenStack token that maps
        to the user/tenant is returned to the caller, along with
        all the other details returned from a normal token validation
        call.

        The returned token is useful for making calls to other
        OpenStack services within the context of the request.

        :param context: standard context
        :param credentials: dict of ec2 signature
        :param ec2Credentials: DEPRECATED dict of ec2 signature
        :returns: token: OpenStack token equivalent to access key along
            with the corresponding service catalog and roles
        """
        raise exception.NotImplemented()

    def _authenticate(self, credentials=None, ec2credentials=None):
        """Common code shared between the V2 and V3 authenticate methods.

        :returns: user_ref, tenant_ref, metadata_ref, roles_ref, catalog_ref
        """

        # FIXME(ja): validate that a service token was used!

        # NOTE(termie): backwards compat hack
        if not credentials and ec2credentials:
            credentials = ec2credentials

        if 'access' not in credentials:
            raise exception.Unauthorized(message='EC2 signature not supplied.')

        creds_ref = self._get_credentials(credentials['access'])
        self.check_signature(creds_ref, credentials)

        # TODO(termie): don't create new tokens every time
        # TODO(termie): this is copied from TokenController.authenticate
        tenant_ref = self.resource_api.get_project(creds_ref['tenant_id'])
        user_ref = self.identity_api.get_user(creds_ref['user_id'])
        metadata_ref = {}
        metadata_ref['roles'] = (
            self.assignment_api.get_roles_for_user_and_project(
                user_ref['id'], tenant_ref['id']))

        trust_id = creds_ref.get('trust_id')
        if trust_id:
            metadata_ref['trust_id'] = trust_id
            metadata_ref['trustee_user_id'] = user_ref['id']

        # Validate that the auth info is valid and nothing is disabled.
        try:
            self.identity_api.assert_user_enabled(
                user_id=user_ref['id'], user=user_ref)
            self.resource_api.assert_domain_enabled(
                domain_id=user_ref['domain_id'])
            self.resource_api.assert_project_enabled(
                project_id=tenant_ref['id'], project=tenant_ref)
        except AssertionError as e:
            # Re-raise as Unauthorized while preserving the traceback.
            six.reraise(exception.Unauthorized, exception.Unauthorized(e),
                        sys.exc_info()[2])

        roles = metadata_ref.get('roles', [])
        if not roles:
            raise exception.Unauthorized(message='User not valid for tenant.')
        roles_ref = [self.role_api.get_role(role_id) for role_id in roles]

        catalog_ref = self.catalog_api.get_catalog(
            user_ref['id'], tenant_ref['id'])

        return user_ref, tenant_ref, metadata_ref, roles_ref, catalog_ref

    def create_credential(self, context, user_id, tenant_id):
        """Create a secret/access pair for use with ec2 style auth.

        Generates a new set of credentials that map the user/tenant
        pair.

        :param context: standard context
        :param user_id: id of user
        :param tenant_id: id of tenant
        :returns: credential: dict of ec2 credential
        """
        # Both lookups double as existence checks; they raise if the
        # user or project is unknown.
        self.identity_api.get_user(user_id)
        self.resource_api.get_project(tenant_id)
        trust_id = self._get_trust_id_for_request(context)
        blob = {'access': uuid.uuid4().hex,
                'secret': uuid.uuid4().hex,
                'trust_id': trust_id}
        # The credential id is the hash of the access key so it can be
        # looked up again from the access key alone.
        credential_id = utils.hash_access_key(blob['access'])
        cred_ref = {'user_id': user_id,
                    'project_id': tenant_id,
                    'blob': jsonutils.dumps(blob),
                    'id': credential_id,
                    'type': 'ec2'}
        self.credential_api.create_credential(credential_id, cred_ref)
        return {'credential': self._convert_v3_to_ec2_credential(cred_ref)}

    def get_credentials(self, user_id):
        """List all credentials for a user.

        :param user_id: id of user
        :returns: credentials: list of ec2 credential dicts
        """
        self.identity_api.get_user(user_id)
        credential_refs = self.credential_api.list_credentials_for_user(
            user_id)
        return {'credentials':
                [self._convert_v3_to_ec2_credential(credential)
                 for credential in credential_refs]}

    def get_credential(self, user_id, credential_id):
        """Retrieve a user's access/secret pair by the access key.

        Grab the full access/secret pair for a given access key.

        :param user_id: id of user
        :param credential_id: access key for credentials
        :returns: credential: dict of ec2 credential
        """
        self.identity_api.get_user(user_id)
        return {'credential': self._get_credentials(credential_id)}

    def delete_credential(self, user_id, credential_id):
        """Delete a user's access/secret pair.

        Used to revoke a user's access/secret pair

        :param user_id: id of user
        :param credential_id: access key for credentials
        :returns: bool: success
        """
        self.identity_api.get_user(user_id)
        # Raises Unauthorized if the access key is unknown.
        self._get_credentials(credential_id)
        ec2_credential_id = utils.hash_access_key(credential_id)
        return self.credential_api.delete_credential(ec2_credential_id)

    @staticmethod
    def _convert_v3_to_ec2_credential(credential):
        """Convert a stored v3 credential ref into the ec2 wire format."""
        # Prior to bug #1259584 fix, blob was stored unserialized
        # but it should be stored as a json string for compatibility
        # with the v3 credentials API.  Fall back to the old behavior
        # for backwards compatibility with existing DB contents.
        try:
            blob = jsonutils.loads(credential['blob'])
        except TypeError:
            blob = credential['blob']
        return {'user_id': credential.get('user_id'),
                'tenant_id': credential.get('project_id'),
                'access': blob.get('access'),
                'secret': blob.get('secret'),
                'trust_id': blob.get('trust_id')}

    def _get_credentials(self, credential_id):
        """Return credentials from an ID.

        :param credential_id: id of credential
        :raises exception.Unauthorized: when credential id is invalid
        :returns: credential: dict of ec2 credential.
        """
        ec2_credential_id = utils.hash_access_key(credential_id)
        creds = self.credential_api.get_credential(ec2_credential_id)
        if not creds:
            raise exception.Unauthorized(message='EC2 access key not found.')
        return self._convert_v3_to_ec2_credential(creds)
+
+
@dependency.requires('policy_api', 'token_provider_api')
class Ec2Controller(Ec2ControllerCommon, controller.V2Controller):
    """V2 flavour of the EC2 credential CRUD and authenticate calls."""

    @controller.v2_deprecated
    def authenticate(self, context, credentials=None, ec2Credentials=None):
        (user_ref, tenant_ref, metadata_ref, roles_ref,
         catalog_ref) = self._authenticate(credentials=credentials,
                                           ec2credentials=ec2Credentials)

        # NOTE(morganfainberg): Make sure the data is in correct form since it
        # might be consumed external to Keystone and this is a v2.0
        # controller.  The token provider does not explicitly care about
        # user_ref version in this case, but the data is stored in the token
        # itself and should match the version.
        v2_user = self.v3_to_v2_user(user_ref)
        auth_token_data = {'user': v2_user,
                           'tenant': tenant_ref,
                           'metadata': metadata_ref,
                           'id': 'placeholder'}
        token_id, token_data = self.token_provider_api.issue_v2_token(
            auth_token_data, roles_ref, catalog_ref)
        return token_data

    def _require_admin_or_owner(self, context, user_id):
        # Admin tokens may act on any user; everyone else only on themselves.
        if not self._is_admin(context):
            self._assert_identity(context, user_id)

    @controller.v2_deprecated
    def get_credential(self, context, user_id, credential_id):
        self._require_admin_or_owner(context, user_id)
        return super(Ec2Controller, self).get_credential(user_id,
                                                         credential_id)

    @controller.v2_deprecated
    def get_credentials(self, context, user_id):
        self._require_admin_or_owner(context, user_id)
        return super(Ec2Controller, self).get_credentials(user_id)

    @controller.v2_deprecated
    def create_credential(self, context, user_id, tenant_id):
        self._require_admin_or_owner(context, user_id)
        return super(Ec2Controller, self).create_credential(context, user_id,
                                                            tenant_id)

    @controller.v2_deprecated
    def delete_credential(self, context, user_id, credential_id):
        # Non-admins must both be the token holder and own the credential.
        if not self._is_admin(context):
            self._assert_identity(context, user_id)
            self._assert_owner(user_id, credential_id)
        return super(Ec2Controller, self).delete_credential(user_id,
                                                            credential_id)

    def _assert_identity(self, context, user_id):
        """Check that the provided token belongs to the user.

        :param context: standard context
        :param user_id: id of user
        :raises exception.Forbidden: when token is invalid

        """
        token_id = context['token_id']
        try:
            token_data = self.token_provider_api.validate_token(token_id)
        except exception.TokenNotFound as e:
            raise exception.Unauthorized(e)

        token_ref = token_model.KeystoneToken(token_id=token_id,
                                              token_data=token_data)

        if token_ref.user_id != user_id:
            raise exception.Forbidden(_('Token belongs to another user'))

    def _is_admin(self, context):
        """Return True iff the context carries an admin token.

        :param context: standard context
        :returns: bool: success

        """
        try:
            # NOTE(morganfainberg): policy_api is required for assert_admin
            # to properly perform policy enforcement.
            self.assert_admin(context)
        except exception.Forbidden:
            return False
        return True

    def _assert_owner(self, user_id, credential_id):
        """Ensure the provided user owns the credential.

        :param user_id: expected credential owner
        :param credential_id: id of credential object
        :raises exception.Forbidden: on failure

        """
        hashed_key = utils.hash_access_key(credential_id)
        cred_ref = self.credential_api.get_credential(hashed_key)
        if cred_ref['user_id'] != user_id:
            raise exception.Forbidden(_('Credential belongs to another user'))
+
+
@dependency.requires('policy_api', 'token_provider_api')
class Ec2ControllerV3(Ec2ControllerCommon, controller.V3Controller):
    """V3 flavour of the EC2 credential CRUD and authenticate calls."""

    member_name = 'project'

    def __init__(self):
        super(Ec2ControllerV3, self).__init__()
        self.get_member_from_driver = self.credential_api.get_credential

    def _check_credential_owner_and_user_id_match(self, context, prep_info,
                                                  user_id, credential_id):
        # NOTE(morganfainberg): this method needs to capture the arguments of
        # the method that is decorated with @controller.protected() (with
        # exception of the first argument ('context') since the protected
        # method passes in *args, **kwargs.  In this case, it is easier to see
        # the expected input if the argspec is `user_id` and `credential_id`
        # explicitly (matching the :class:`.ec2_delete_credential()` method
        # below).
        hashed_key = utils.hash_access_key(credential_id)
        # NOTE(morganfainberg): policy_api is required for this
        # check_protection to properly be able to perform policy enforcement.
        target = {'credential': self.credential_api.get_credential(hashed_key)}
        self.check_protection(context, prep_info, target)

    def authenticate(self, context, credentials=None, ec2Credentials=None):
        (user_ref, project_ref, metadata_ref, roles_ref,
         catalog_ref) = self._authenticate(credentials=credentials,
                                           ec2credentials=ec2Credentials)

        token_id, token_data = self.token_provider_api.issue_v3_token(
            user_ref['id'], ['ec2credential'], project_id=project_ref['id'],
            metadata_ref=metadata_ref)
        return render_token_data_response(token_id, token_data)

    @controller.protected(callback=_check_credential_owner_and_user_id_match)
    def ec2_get_credential(self, context, user_id, credential_id):
        return super(Ec2ControllerV3, self).get_credential(user_id,
                                                           credential_id)

    @controller.protected()
    def ec2_list_credentials(self, context, user_id):
        return super(Ec2ControllerV3, self).get_credentials(user_id)

    @controller.protected()
    def ec2_create_credential(self, context, user_id, tenant_id):
        return super(Ec2ControllerV3, self).create_credential(context, user_id,
                                                              tenant_id)

    @controller.protected(callback=_check_credential_owner_and_user_id_match)
    def ec2_delete_credential(self, context, user_id, credential_id):
        return super(Ec2ControllerV3, self).delete_credential(user_id,
                                                              credential_id)
+
+
def render_token_data_response(token_id, token_data):
    """Render token data HTTP response.

    Stash token ID into the X-Subject-Token header.

    """
    return wsgi.render_response(body=token_data,
                                status=(200, 'OK'),
                                headers=[('X-Subject-Token', token_id)])
diff --git a/keystone-moon/keystone/contrib/ec2/core.py b/keystone-moon/keystone/contrib/ec2/core.py
new file mode 100644
index 00000000..77857af8
--- /dev/null
+++ b/keystone-moon/keystone/contrib/ec2/core.py
@@ -0,0 +1,34 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import extension
+
+
+# Static advertisement record for the OS-EC2 extension; exposed through
+# Keystone's extensions discovery API so clients can detect EC2 support.
+EXTENSION_DATA = {
+    'name': 'OpenStack EC2 API',
+    'namespace': 'http://docs.openstack.org/identity/api/ext/'
+                 'OS-EC2/v1.0',
+    'alias': 'OS-EC2',
+    'updated': '2013-07-07T12:00:0-00:00',
+    'description': 'OpenStack EC2 Credentials backend.',
+    'links': [
+        {
+            'rel': 'describedby',
+            # TODO(ayoung): needs a description
+            'type': 'text/html',
+            'href': 'https://github.com/openstack/identity-api',
+        }
+    ]}
+# Register on both the admin and public pipelines at import time.
+extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
+extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
diff --git a/keystone-moon/keystone/contrib/ec2/routers.py b/keystone-moon/keystone/contrib/ec2/routers.py
new file mode 100644
index 00000000..7b6bf115
--- /dev/null
+++ b/keystone-moon/keystone/contrib/ec2/routers.py
@@ -0,0 +1,95 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+
+from keystone.common import json_home
+from keystone.common import wsgi
+from keystone.contrib.ec2 import controllers
+
+
+# Pre-bind the OS-EC2 extension name/version so route definitions below
+# only need to supply the resource name.
+build_resource_relation = functools.partial(
+    json_home.build_v3_extension_resource_relation, extension_name='OS-EC2',
+    extension_version='1.0')
+
+# Same, for JSON Home parameter relations (e.g. credential_id).
+build_parameter_relation = functools.partial(
+    json_home.build_v3_extension_parameter_relation, extension_name='OS-EC2',
+    extension_version='1.0')
+
+
+class Ec2Extension(wsgi.ExtensionRouter):
+    """v2 router for the OS-EC2 extension (token validation + CRUD)."""
+
+    def add_routes(self, mapper):
+        """Wire EC2 routes onto *mapper* using a single controller."""
+        ec2_controller = controllers.Ec2Controller()
+        # validation
+        mapper.connect(
+            '/ec2tokens',
+            controller=ec2_controller,
+            action='authenticate',
+            conditions=dict(method=['POST']))
+
+        # crud
+        mapper.connect(
+            '/users/{user_id}/credentials/OS-EC2',
+            controller=ec2_controller,
+            action='create_credential',
+            conditions=dict(method=['POST']))
+        mapper.connect(
+            '/users/{user_id}/credentials/OS-EC2',
+            controller=ec2_controller,
+            action='get_credentials',
+            conditions=dict(method=['GET']))
+        mapper.connect(
+            '/users/{user_id}/credentials/OS-EC2/{credential_id}',
+            controller=ec2_controller,
+            action='get_credential',
+            conditions=dict(method=['GET']))
+        mapper.connect(
+            '/users/{user_id}/credentials/OS-EC2/{credential_id}',
+            controller=ec2_controller,
+            action='delete_credential',
+            conditions=dict(method=['DELETE']))
+
+
+class Ec2ExtensionV3(wsgi.V3ExtensionRouter):
+    """v3 router for the OS-EC2 extension, with JSON Home relations."""
+
+    def add_routes(self, mapper):
+        """Wire v3 EC2 routes; each resource advertises a JSON Home rel."""
+        ec2_controller = controllers.Ec2ControllerV3()
+        # validation
+        self._add_resource(
+            mapper, ec2_controller,
+            path='/ec2tokens',
+            post_action='authenticate',
+            rel=build_resource_relation(resource_name='ec2tokens'))
+
+        # crud
+        self._add_resource(
+            mapper, ec2_controller,
+            path='/users/{user_id}/credentials/OS-EC2',
+            get_action='ec2_list_credentials',
+            post_action='ec2_create_credential',
+            rel=build_resource_relation(resource_name='user_credentials'),
+            path_vars={
+                'user_id': json_home.Parameters.USER_ID,
+            })
+        self._add_resource(
+            mapper, ec2_controller,
+            path='/users/{user_id}/credentials/OS-EC2/{credential_id}',
+            get_action='ec2_get_credential',
+            delete_action='ec2_delete_credential',
+            rel=build_resource_relation(resource_name='user_credential'),
+            path_vars={
+                'credential_id':
+                build_parameter_relation(parameter_name='credential_id'),
+                'user_id': json_home.Parameters.USER_ID,
+            })
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/__init__.py b/keystone-moon/keystone/contrib/endpoint_filter/__init__.py
new file mode 100644
index 00000000..72508c3e
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.endpoint_filter.core import * # noqa
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/backends/__init__.py b/keystone-moon/keystone/contrib/endpoint_filter/backends/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/backends/__init__.py
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/backends/catalog_sql.py b/keystone-moon/keystone/contrib/endpoint_filter/backends/catalog_sql.py
new file mode 100644
index 00000000..6ac3c1ca
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/backends/catalog_sql.py
@@ -0,0 +1,76 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+import six
+
+from keystone.catalog.backends import sql
+from keystone.catalog import core as catalog_core
+from keystone.common import dependency
+from keystone import exception
+
+CONF = cfg.CONF
+
+
+@dependency.requires('endpoint_filter_api')
+class EndpointFilterCatalog(sql.Catalog):
+    """SQL catalog driver that restricts the v3 catalog per project.
+
+    Only endpoints explicitly associated with the project (via the
+    endpoint filter extension) are returned; optionally falls back to the
+    full catalog when no association exists.
+    """
+
+    def get_v3_catalog(self, user_id, project_id):
+        """Return the filtered v3 catalog for *user_id* / *project_id*."""
+        # URL templates in endpoints may reference any CONF value plus
+        # tenant_id / user_id; build the substitution map once.
+        substitutions = dict(six.iteritems(CONF))
+        substitutions.update({'tenant_id': project_id, 'user_id': user_id})
+
+        # service_id -> service dict (with accumulated 'endpoints' list)
+        services = {}
+
+        refs = self.endpoint_filter_api.list_endpoints_for_project(project_id)
+
+        # No filter configured for this project: optionally serve the
+        # unfiltered catalog, controlled by configuration.
+        if (not refs and
+                CONF.endpoint_filter.return_all_endpoints_if_no_filter):
+            return super(EndpointFilterCatalog, self).get_v3_catalog(
+                user_id, project_id)
+
+        for entry in refs:
+            try:
+                endpoint = self.get_endpoint(entry['endpoint_id'])
+                if not endpoint['enabled']:
+                    # Skip disabled endpoints.
+                    continue
+                service_id = endpoint['service_id']
+                services.setdefault(
+                    service_id,
+                    self.get_service(service_id))
+                service = services[service_id]
+                # Strip internal bookkeeping fields before exposing the
+                # endpoint in the catalog.
+                del endpoint['service_id']
+                del endpoint['enabled']
+                del endpoint['legacy_endpoint_id']
+                endpoint['url'] = catalog_core.format_url(
+                    endpoint['url'], substitutions)
+                # populate filtered endpoints
+                if 'endpoints' in services[service_id]:
+                    service['endpoints'].append(endpoint)
+                else:
+                    service['endpoints'] = [endpoint]
+            except exception.EndpointNotFound:
+                # remove bad reference from association
+                self.endpoint_filter_api.remove_endpoint_from_project(
+                    entry['endpoint_id'], project_id)
+
+        # format catalog
+        catalog = []
+        for service_id, service in six.iteritems(services):
+            formatted_service = {}
+            formatted_service['id'] = service['id']
+            formatted_service['type'] = service['type']
+            formatted_service['endpoints'] = service['endpoints']
+            catalog.append(formatted_service)
+
+        return catalog
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/backends/sql.py b/keystone-moon/keystone/contrib/endpoint_filter/backends/sql.py
new file mode 100644
index 00000000..a998423f
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/backends/sql.py
@@ -0,0 +1,224 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import sql
+from keystone import exception
+from keystone.i18n import _
+
+
+class ProjectEndpoint(sql.ModelBase, sql.ModelDictMixin):
+    """project-endpoint relationship table."""
+    __tablename__ = 'project_endpoint'
+    attributes = ['endpoint_id', 'project_id']
+    # Composite primary key: one row per (endpoint, project) association.
+    endpoint_id = sql.Column(sql.String(64),
+                             primary_key=True,
+                             nullable=False)
+    project_id = sql.Column(sql.String(64),
+                            primary_key=True,
+                            nullable=False)
+
+
+class EndpointGroup(sql.ModelBase, sql.ModelDictMixin):
+    """Endpoint Groups table."""
+    __tablename__ = 'endpoint_group'
+    attributes = ['id', 'name', 'description', 'filters']
+    # Fields update_endpoint_group() is allowed to change; 'id' is fixed.
+    mutable_attributes = frozenset(['name', 'description', 'filters'])
+    id = sql.Column(sql.String(64), primary_key=True)
+    name = sql.Column(sql.String(255), nullable=False)
+    description = sql.Column(sql.Text, nullable=True)
+    # JSON-encoded dict of endpoint attributes used to match endpoints.
+    filters = sql.Column(sql.JsonBlob(), nullable=False)
+
+
+class ProjectEndpointGroupMembership(sql.ModelBase, sql.ModelDictMixin):
+    """Project to Endpoint group relationship table."""
+    __tablename__ = 'project_endpoint_group'
+    attributes = ['endpoint_group_id', 'project_id']
+    endpoint_group_id = sql.Column(sql.String(64),
+                                   sql.ForeignKey('endpoint_group.id'),
+                                   nullable=False)
+    project_id = sql.Column(sql.String(64), nullable=False)
+    # Composite PK declared via table args since endpoint_group_id also
+    # carries a foreign key.
+    __table_args__ = (sql.PrimaryKeyConstraint('endpoint_group_id',
+                                               'project_id'), {})
+
+
+class EndpointFilter(object):
+    """SQL backend for project/endpoint and endpoint-group associations.
+
+    Implements the endpoint filter driver interface on top of the three
+    models above. All writes run inside a ``session.begin()`` block.
+    """
+
+    @sql.handle_conflicts(conflict_type='project_endpoint')
+    def add_endpoint_to_project(self, endpoint_id, project_id):
+        """Create a (endpoint, project) association row.
+
+        Duplicate inserts surface as a conflict via handle_conflicts.
+        """
+        session = sql.get_session()
+        with session.begin():
+            endpoint_filter_ref = ProjectEndpoint(endpoint_id=endpoint_id,
+                                                  project_id=project_id)
+            session.add(endpoint_filter_ref)
+
+    def _get_project_endpoint_ref(self, session, endpoint_id, project_id):
+        # Lookup by composite primary key; raise NotFound with a
+        # descriptive message when absent.
+        endpoint_filter_ref = session.query(ProjectEndpoint).get(
+            (endpoint_id, project_id))
+        if endpoint_filter_ref is None:
+            msg = _('Endpoint %(endpoint_id)s not found in project '
+                    '%(project_id)s') % {'endpoint_id': endpoint_id,
+                                         'project_id': project_id}
+            raise exception.NotFound(msg)
+        return endpoint_filter_ref
+
+    def check_endpoint_in_project(self, endpoint_id, project_id):
+        """Raise NotFound unless the association exists."""
+        session = sql.get_session()
+        self._get_project_endpoint_ref(session, endpoint_id, project_id)
+
+    def remove_endpoint_from_project(self, endpoint_id, project_id):
+        """Delete one (endpoint, project) association row."""
+        session = sql.get_session()
+        endpoint_filter_ref = self._get_project_endpoint_ref(
+            session, endpoint_id, project_id)
+        with session.begin():
+            session.delete(endpoint_filter_ref)
+
+    def list_endpoints_for_project(self, project_id):
+        """Return association dicts for every endpoint of *project_id*."""
+        session = sql.get_session()
+        query = session.query(ProjectEndpoint)
+        query = query.filter_by(project_id=project_id)
+        endpoint_filter_refs = query.all()
+        return [ref.to_dict() for ref in endpoint_filter_refs]
+
+    def list_projects_for_endpoint(self, endpoint_id):
+        """Return association dicts for every project of *endpoint_id*."""
+        session = sql.get_session()
+        query = session.query(ProjectEndpoint)
+        query = query.filter_by(endpoint_id=endpoint_id)
+        endpoint_filter_refs = query.all()
+        return [ref.to_dict() for ref in endpoint_filter_refs]
+
+    def delete_association_by_endpoint(self, endpoint_id):
+        """Bulk-delete all associations referencing *endpoint_id*."""
+        session = sql.get_session()
+        with session.begin():
+            query = session.query(ProjectEndpoint)
+            query = query.filter_by(endpoint_id=endpoint_id)
+            # Bulk delete; skip session sync since nothing is reused after.
+            query.delete(synchronize_session=False)
+
+    def delete_association_by_project(self, project_id):
+        """Bulk-delete all associations referencing *project_id*."""
+        session = sql.get_session()
+        with session.begin():
+            query = session.query(ProjectEndpoint)
+            query = query.filter_by(project_id=project_id)
+            query.delete(synchronize_session=False)
+
+    def create_endpoint_group(self, endpoint_group_id, endpoint_group):
+        """Persist a new endpoint group and return it as a dict.
+
+        NOTE(review): the id is expected to already be present in
+        *endpoint_group*; the endpoint_group_id argument itself is not
+        used here — confirm against callers.
+        """
+        session = sql.get_session()
+        with session.begin():
+            endpoint_group_ref = EndpointGroup.from_dict(endpoint_group)
+            session.add(endpoint_group_ref)
+        return endpoint_group_ref.to_dict()
+
+    def _get_endpoint_group(self, session, endpoint_group_id):
+        # Shared lookup used by get/update/delete below.
+        endpoint_group_ref = session.query(EndpointGroup).get(
+            endpoint_group_id)
+        if endpoint_group_ref is None:
+            raise exception.EndpointGroupNotFound(
+                endpoint_group_id=endpoint_group_id)
+        return endpoint_group_ref
+
+    def get_endpoint_group(self, endpoint_group_id):
+        """Return one endpoint group as a dict; raise if missing."""
+        session = sql.get_session()
+        endpoint_group_ref = self._get_endpoint_group(session,
+                                                      endpoint_group_id)
+        return endpoint_group_ref.to_dict()
+
+    def update_endpoint_group(self, endpoint_group_id, endpoint_group):
+        """Apply a partial update to an endpoint group's mutable fields."""
+        session = sql.get_session()
+        with session.begin():
+            endpoint_group_ref = self._get_endpoint_group(session,
+                                                          endpoint_group_id)
+            # Merge the incoming partial dict over the stored record, then
+            # copy only mutable attributes back onto the mapped object.
+            old_endpoint_group = endpoint_group_ref.to_dict()
+            old_endpoint_group.update(endpoint_group)
+            new_endpoint_group = EndpointGroup.from_dict(old_endpoint_group)
+            for attr in EndpointGroup.mutable_attributes:
+                setattr(endpoint_group_ref, attr,
+                        getattr(new_endpoint_group, attr))
+        return endpoint_group_ref.to_dict()
+
+    def delete_endpoint_group(self, endpoint_group_id):
+        """Delete an endpoint group and all its project associations."""
+        session = sql.get_session()
+        endpoint_group_ref = self._get_endpoint_group(session,
+                                                      endpoint_group_id)
+        with session.begin():
+            session.delete(endpoint_group_ref)
+            # Cascade: drop memberships pointing at the deleted group.
+            self._delete_endpoint_group_association_by_endpoint_group(
+                session, endpoint_group_id)
+
+    def get_endpoint_group_in_project(self, endpoint_group_id, project_id):
+        """Return the group/project membership as a dict; raise if absent."""
+        session = sql.get_session()
+        ref = self._get_endpoint_group_in_project(session,
+                                                  endpoint_group_id,
+                                                  project_id)
+        return ref.to_dict()
+
+    @sql.handle_conflicts(conflict_type='project_endpoint_group')
+    def add_endpoint_group_to_project(self, endpoint_group_id, project_id):
+        """Associate an endpoint group with a project."""
+        session = sql.get_session()
+
+        with session.begin():
+            # Create a new Project Endpoint group entity
+            endpoint_group_project_ref = ProjectEndpointGroupMembership(
+                endpoint_group_id=endpoint_group_id, project_id=project_id)
+            session.add(endpoint_group_project_ref)
+
+    def _get_endpoint_group_in_project(self, session,
+                                       endpoint_group_id, project_id):
+        # Lookup by composite primary key.
+        endpoint_group_project_ref = session.query(
+            ProjectEndpointGroupMembership).get((endpoint_group_id,
+                                                 project_id))
+        if endpoint_group_project_ref is None:
+            msg = _('Endpoint Group Project Association not found')
+            raise exception.NotFound(msg)
+        else:
+            return endpoint_group_project_ref
+
+    def list_endpoint_groups(self):
+        """Return every endpoint group as a list of dicts."""
+        session = sql.get_session()
+        query = session.query(EndpointGroup)
+        endpoint_group_refs = query.all()
+        return [e.to_dict() for e in endpoint_group_refs]
+
+    def list_endpoint_groups_for_project(self, project_id):
+        """Return membership dicts for every group tied to *project_id*."""
+        session = sql.get_session()
+        query = session.query(ProjectEndpointGroupMembership)
+        query = query.filter_by(project_id=project_id)
+        endpoint_group_refs = query.all()
+        return [ref.to_dict() for ref in endpoint_group_refs]
+
+    def remove_endpoint_group_from_project(self, endpoint_group_id,
+                                           project_id):
+        """Delete one group/project membership row; raise if absent."""
+        session = sql.get_session()
+        endpoint_group_project_ref = self._get_endpoint_group_in_project(
+            session, endpoint_group_id, project_id)
+        with session.begin():
+            session.delete(endpoint_group_project_ref)
+
+    def list_projects_associated_with_endpoint_group(self, endpoint_group_id):
+        """Return membership dicts for every project in the group."""
+        session = sql.get_session()
+        query = session.query(ProjectEndpointGroupMembership)
+        query = query.filter_by(endpoint_group_id=endpoint_group_id)
+        endpoint_group_refs = query.all()
+        return [ref.to_dict() for ref in endpoint_group_refs]
+
+    def _delete_endpoint_group_association_by_endpoint_group(
+            self, session, endpoint_group_id):
+        # Runs inside the caller's transaction (see delete_endpoint_group).
+        query = session.query(ProjectEndpointGroupMembership)
+        query = query.filter_by(endpoint_group_id=endpoint_group_id)
+        query.delete()
+
+    def delete_endpoint_group_association_by_project(self, project_id):
+        """Bulk-delete all group memberships for *project_id*."""
+        session = sql.get_session()
+        with session.begin():
+            query = session.query(ProjectEndpointGroupMembership)
+            query = query.filter_by(project_id=project_id)
+            query.delete()
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/controllers.py b/keystone-moon/keystone/contrib/endpoint_filter/controllers.py
new file mode 100644
index 00000000..dc4ef7a3
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/controllers.py
@@ -0,0 +1,300 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+
+from keystone.catalog import controllers as catalog_controllers
+from keystone.common import controller
+from keystone.common import dependency
+from keystone.common import validation
+from keystone.contrib.endpoint_filter import schema
+from keystone import exception
+from keystone import notifications
+from keystone import resource
+
+
+@dependency.requires('catalog_api', 'endpoint_filter_api', 'resource_api')
+class _ControllerBase(controller.V3Controller):
+    """Base behaviors for endpoint filter controllers."""
+
+    def _get_endpoint_groups_for_project(self, project_id):
+        # recover the project endpoint group memberships and for each
+        # membership recover the endpoint group
+        # Validates the project exists first; a missing group anywhere in
+        # the list yields an empty result rather than an error.
+        self.resource_api.get_project(project_id)
+        try:
+            refs = self.endpoint_filter_api.list_endpoint_groups_for_project(
+                project_id)
+            endpoint_groups = [self.endpoint_filter_api.get_endpoint_group(
+                ref['endpoint_group_id']) for ref in refs]
+            return endpoint_groups
+        except exception.EndpointGroupNotFound:
+            return []
+
+    def _get_endpoints_filtered_by_endpoint_group(self, endpoint_group_id):
+        """Return endpoints whose attributes match ALL the group's filters."""
+        endpoints = self.catalog_api.list_endpoints()
+        filters = self.endpoint_filter_api.get_endpoint_group(
+            endpoint_group_id)['filters']
+        filtered_endpoints = []
+
+        # An endpoint qualifies only when every filter key/value matches
+        # (logical AND across filters).
+        for endpoint in endpoints:
+            is_candidate = True
+            for key, value in six.iteritems(filters):
+                if endpoint[key] != value:
+                    is_candidate = False
+                    break
+            if is_candidate:
+                filtered_endpoints.append(endpoint)
+        return filtered_endpoints
+
+
+class EndpointFilterV3Controller(_ControllerBase):
+    """v3 controller for project/endpoint association management."""
+
+    def __init__(self):
+        super(EndpointFilterV3Controller, self).__init__()
+        # Clean up associations automatically when the referenced project
+        # or endpoint is deleted elsewhere in Keystone.
+        notifications.register_event_callback(
+            notifications.ACTIONS.deleted, 'project',
+            self._on_project_or_endpoint_delete)
+        notifications.register_event_callback(
+            notifications.ACTIONS.deleted, 'endpoint',
+            self._on_project_or_endpoint_delete)
+
+    def _on_project_or_endpoint_delete(self, service, resource_type, operation,
+                                       payload):
+        # payload['resource_info'] carries the deleted resource's id.
+        project_or_endpoint_id = payload['resource_info']
+        if resource_type == 'project':
+            self.endpoint_filter_api.delete_association_by_project(
+                project_or_endpoint_id)
+        else:
+            self.endpoint_filter_api.delete_association_by_endpoint(
+                project_or_endpoint_id)
+
+    @controller.protected()
+    def add_endpoint_to_project(self, context, project_id, endpoint_id):
+        """Establishes an association between an endpoint and a project."""
+        # NOTE(gyee): we just need to make sure endpoint and project exist
+        # first. We don't really care whether if project is disabled.
+        # The relationship can still be established even with a disabled
+        # project as there are no security implications.
+        self.catalog_api.get_endpoint(endpoint_id)
+        self.resource_api.get_project(project_id)
+        self.endpoint_filter_api.add_endpoint_to_project(endpoint_id,
+                                                         project_id)
+
+    @controller.protected()
+    def check_endpoint_in_project(self, context, project_id, endpoint_id):
+        """Verifies endpoint is currently associated with given project."""
+        self.catalog_api.get_endpoint(endpoint_id)
+        self.resource_api.get_project(project_id)
+        self.endpoint_filter_api.check_endpoint_in_project(endpoint_id,
+                                                           project_id)
+
+    @controller.protected()
+    def list_endpoints_for_project(self, context, project_id):
+        """List all endpoints currently associated with a given project."""
+        self.resource_api.get_project(project_id)
+        # Direct associations first, keyed by endpoint id for dedup.
+        refs = self.endpoint_filter_api.list_endpoints_for_project(project_id)
+        filtered_endpoints = {ref['endpoint_id']:
+                              self.catalog_api.get_endpoint(ref['endpoint_id'])
+                              for ref in refs}
+
+        # need to recover endpoint_groups associated with project
+        # then for each endpoint group return the endpoints.
+        endpoint_groups = self._get_endpoint_groups_for_project(project_id)
+        for endpoint_group in endpoint_groups:
+            endpoint_refs = self._get_endpoints_filtered_by_endpoint_group(
+                endpoint_group['id'])
+            # now check if any endpoints for current endpoint group are not
+            # contained in the list of filtered endpoints
+            for endpoint_ref in endpoint_refs:
+                if endpoint_ref['id'] not in filtered_endpoints:
+                    filtered_endpoints[endpoint_ref['id']] = endpoint_ref
+
+        return catalog_controllers.EndpointV3.wrap_collection(
+            context, [v for v in six.itervalues(filtered_endpoints)])
+
+    @controller.protected()
+    def remove_endpoint_from_project(self, context, project_id, endpoint_id):
+        """Remove the endpoint from the association with given project."""
+        self.endpoint_filter_api.remove_endpoint_from_project(endpoint_id,
+                                                              project_id)
+
+    @controller.protected()
+    def list_projects_for_endpoint(self, context, endpoint_id):
+        """Return a list of projects associated with the endpoint."""
+        self.catalog_api.get_endpoint(endpoint_id)
+        refs = self.endpoint_filter_api.list_projects_for_endpoint(endpoint_id)
+
+        projects = [self.resource_api.get_project(
+            ref['project_id']) for ref in refs]
+        return resource.controllers.ProjectV3.wrap_collection(context,
+                                                              projects)
+
+
+class EndpointGroupV3Controller(_ControllerBase):
+    """v3 controller for endpoint group CRUD and group-based listings."""
+
+    collection_name = 'endpoint_groups'
+    member_name = 'endpoint_group'
+
+    # Endpoint attributes a group filter is allowed to match on.
+    VALID_FILTER_KEYS = ['service_id', 'region_id', 'interface']
+
+    def __init__(self):
+        super(EndpointGroupV3Controller, self).__init__()
+
+    @classmethod
+    def base_url(cls, context, path=None):
+        """Construct a path and pass it to V3Controller.base_url method."""
+
+        # NOTE(review): the incoming *path* argument is always discarded
+        # and rebuilt under the OS-EP-FILTER prefix.
+        path = '/OS-EP-FILTER/' + cls.collection_name
+        return super(EndpointGroupV3Controller, cls).base_url(context,
+                                                              path=path)
+
+    @controller.protected()
+    @validation.validated(schema.endpoint_group_create, 'endpoint_group')
+    def create_endpoint_group(self, context, endpoint_group):
+        """Creates an Endpoint Group with the associated filters."""
+        ref = self._assign_unique_id(self._normalize_dict(endpoint_group))
+        self._require_attribute(ref, 'filters')
+        self._require_valid_filter(ref)
+        ref = self.endpoint_filter_api.create_endpoint_group(ref['id'], ref)
+        return EndpointGroupV3Controller.wrap_member(context, ref)
+
+    def _require_valid_filter(self, endpoint_group):
+        # Reject any filter key outside the whitelist above.
+        filters = endpoint_group.get('filters')
+        for key in six.iterkeys(filters):
+            if key not in self.VALID_FILTER_KEYS:
+                raise exception.ValidationError(
+                    attribute=self._valid_filter_keys(),
+                    target='endpoint_group')
+
+    def _valid_filter_keys(self):
+        # Human-readable list of allowed keys for error messages.
+        return ' or '.join(self.VALID_FILTER_KEYS)
+
+    @controller.protected()
+    def get_endpoint_group(self, context, endpoint_group_id):
+        """Retrieve the endpoint group associated with the id if exists."""
+        ref = self.endpoint_filter_api.get_endpoint_group(endpoint_group_id)
+        return EndpointGroupV3Controller.wrap_member(
+            context, ref)
+
+    @controller.protected()
+    @validation.validated(schema.endpoint_group_update, 'endpoint_group')
+    def update_endpoint_group(self, context, endpoint_group_id,
+                              endpoint_group):
+        """Update fixed values and/or extend the filters."""
+        # Filters are only re-validated when the update supplies them.
+        if 'filters' in endpoint_group:
+            self._require_valid_filter(endpoint_group)
+        ref = self.endpoint_filter_api.update_endpoint_group(endpoint_group_id,
+                                                             endpoint_group)
+        return EndpointGroupV3Controller.wrap_member(
+            context, ref)
+
+    @controller.protected()
+    def delete_endpoint_group(self, context, endpoint_group_id):
+        """Delete endpoint_group."""
+        self.endpoint_filter_api.delete_endpoint_group(endpoint_group_id)
+
+    @controller.protected()
+    def list_endpoint_groups(self, context):
+        """List all endpoint groups."""
+        refs = self.endpoint_filter_api.list_endpoint_groups()
+        return EndpointGroupV3Controller.wrap_collection(
+            context, refs)
+
+    @controller.protected()
+    def list_endpoint_groups_for_project(self, context, project_id):
+        """List all endpoint groups associated with a given project."""
+        return EndpointGroupV3Controller.wrap_collection(
+            context, self._get_endpoint_groups_for_project(project_id))
+
+    @controller.protected()
+    def list_projects_associated_with_endpoint_group(self,
+                                                     context,
+                                                     endpoint_group_id):
+        """List all projects associated with endpoint group."""
+        endpoint_group_refs = (self.endpoint_filter_api.
+                               list_projects_associated_with_endpoint_group(
+                                   endpoint_group_id))
+        projects = []
+        for endpoint_group_ref in endpoint_group_refs:
+            project = self.resource_api.get_project(
+                endpoint_group_ref['project_id'])
+            if project:
+                projects.append(project)
+        return resource.controllers.ProjectV3.wrap_collection(context,
+                                                              projects)
+
+    @controller.protected()
+    def list_endpoints_associated_with_endpoint_group(self,
+                                                      context,
+                                                      endpoint_group_id):
+        """List all the endpoints filtered by a specific endpoint group."""
+        filtered_endpoints = self._get_endpoints_filtered_by_endpoint_group(
+            endpoint_group_id)
+        return catalog_controllers.EndpointV3.wrap_collection(
+            context, filtered_endpoints)
+
+
+class ProjectEndpointGroupV3Controller(_ControllerBase):
+    """v3 controller for endpoint-group-to-project associations."""
+
+    collection_name = 'project_endpoint_groups'
+    member_name = 'project_endpoint_group'
+
+    def __init__(self):
+        super(ProjectEndpointGroupV3Controller, self).__init__()
+        # Drop group memberships automatically when a project is deleted.
+        notifications.register_event_callback(
+            notifications.ACTIONS.deleted, 'project',
+            self._on_project_delete)
+
+    def _on_project_delete(self, service, resource_type,
+                           operation, payload):
+        project_id = payload['resource_info']
+        (self.endpoint_filter_api.
+            delete_endpoint_group_association_by_project(
+                project_id))
+
+    @controller.protected()
+    def get_endpoint_group_in_project(self, context, endpoint_group_id,
+                                      project_id):
+        """Retrieve the endpoint group associated with the id if exists."""
+        # Validate both sides of the association before fetching it.
+        self.resource_api.get_project(project_id)
+        self.endpoint_filter_api.get_endpoint_group(endpoint_group_id)
+        ref = self.endpoint_filter_api.get_endpoint_group_in_project(
+            endpoint_group_id, project_id)
+        return ProjectEndpointGroupV3Controller.wrap_member(
+            context, ref)
+
+    @controller.protected()
+    def add_endpoint_group_to_project(self, context, endpoint_group_id,
+                                      project_id):
+        """Creates an association between an endpoint group and project."""
+        self.resource_api.get_project(project_id)
+        self.endpoint_filter_api.get_endpoint_group(endpoint_group_id)
+        self.endpoint_filter_api.add_endpoint_group_to_project(
+            endpoint_group_id, project_id)
+
+    @controller.protected()
+    def remove_endpoint_group_from_project(self, context, endpoint_group_id,
+                                           project_id):
+        """Remove the endpoint group from associated project."""
+        self.resource_api.get_project(project_id)
+        self.endpoint_filter_api.get_endpoint_group(endpoint_group_id)
+        self.endpoint_filter_api.remove_endpoint_group_from_project(
+            endpoint_group_id, project_id)
+
+    @classmethod
+    def _add_self_referential_link(cls, context, ref):
+        # Membership resources live under the endpoint group, not under
+        # the generic collection URL, so build the self link manually.
+        url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
+               '/projects/%(project_id)s' % {
+                   'endpoint_group_id': ref['endpoint_group_id'],
+                   'project_id': ref['project_id']})
+        ref.setdefault('links', {})
+        ref['links']['self'] = url
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/core.py b/keystone-moon/keystone/contrib/endpoint_filter/core.py
new file mode 100644
index 00000000..972b65dd
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/core.py
@@ -0,0 +1,289 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import abc
+
+from oslo_config import cfg
+from oslo_log import log
+import six
+
+from keystone.common import dependency
+from keystone.common import extension
+from keystone.common import manager
+from keystone import exception
+
+
+CONF = cfg.CONF
+LOG = log.getLogger(__name__)
+
+# Static metadata describing this extension for the extensions discovery
+# API.
+# NOTE(review): the 'updated' timestamp below looks malformed
+# ('12:00:0-00:00' has a one-digit seconds field) — verify against the
+# intended ISO 8601 value before relying on it.
+extension_data = {
+    'name': 'OpenStack Keystone Endpoint Filter API',
+    'namespace': 'http://docs.openstack.org/identity/api/ext/'
+                 'OS-EP-FILTER/v1.0',
+    'alias': 'OS-EP-FILTER',
+    'updated': '2013-07-23T12:00:0-00:00',
+    'description': 'OpenStack Keystone Endpoint Filter API.',
+    'links': [
+        {
+            'rel': 'describedby',
+            # TODO(ayoung): needs a description
+            'type': 'text/html',
+            'href': 'https://github.com/openstack/identity-api/blob/master'
+                    '/openstack-identity-api/v3/src/markdown/'
+                    'identity-api-v3-os-ep-filter-ext.md',
+        }
+    ]}
+# Register so the extension is advertised on the admin extensions list.
+extension.register_admin_extension(extension_data['alias'], extension_data)
+
+
+@dependency.provider('endpoint_filter_api')
+class Manager(manager.Manager):
+    """Default pivot point for the Endpoint Filter backend.
+
+    See :mod:`keystone.common.manager.Manager` for more details on how this
+    dynamically calls the backend.
+
+    """
+
+    def __init__(self):
+        # The concrete driver class is chosen by the [endpoint_filter]
+        # driver option in the keystone configuration.
+        super(Manager, self).__init__(CONF.endpoint_filter.driver)
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Driver(object):
+    """Interface description for an Endpoint Filter driver.
+
+    Concrete backends must implement every abstract method below; each
+    default implementation only raises ``exception.NotImplemented``.
+    """
+
+    @abc.abstractmethod
+    def add_endpoint_to_project(self, endpoint_id, project_id):
+        """Create an endpoint to project association.
+
+        :param endpoint_id: identity of endpoint to associate
+        :type endpoint_id: string
+        :param project_id: identity of the project to be associated with
+        :type project_id: string
+        :raises: keystone.exception.Conflict,
+        :returns: None.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def remove_endpoint_from_project(self, endpoint_id, project_id):
+        """Removes an endpoint to project association.
+
+        :param endpoint_id: identity of endpoint to remove
+        :type endpoint_id: string
+        :param project_id: identity of the project associated with
+        :type project_id: string
+        :raises: exception.NotFound
+        :returns: None.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def check_endpoint_in_project(self, endpoint_id, project_id):
+        """Checks if an endpoint is associated with a project.
+
+        :param endpoint_id: identity of endpoint to check
+        :type endpoint_id: string
+        :param project_id: identity of the project associated with
+        :type project_id: string
+        :raises: exception.NotFound
+        :returns: None.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def list_endpoints_for_project(self, project_id):
+        """List all endpoints associated with a project.
+
+        :param project_id: identity of the project to check
+        :type project_id: string
+        :returns: a list of identity endpoint ids or an empty list.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def list_projects_for_endpoint(self, endpoint_id):
+        """List all projects associated with an endpoint.
+
+        :param endpoint_id: identity of endpoint to check
+        :type endpoint_id: string
+        :returns: a list of projects or an empty list.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def delete_association_by_endpoint(self, endpoint_id):
+        """Removes all the endpoints to project association with endpoint.
+
+        :param endpoint_id: identity of endpoint to check
+        :type endpoint_id: string
+        :returns: None
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def delete_association_by_project(self, project_id):
+        """Removes all the endpoints to project association with project.
+
+        :param project_id: identity of the project to check
+        :type project_id: string
+        :returns: None
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def create_endpoint_group(self, endpoint_group):
+        """Create an endpoint group.
+
+        :param endpoint_group: endpoint group to create
+        :type endpoint_group: dictionary
+        :raises: keystone.exception.Conflict,
+        :returns: an endpoint group representation.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def get_endpoint_group(self, endpoint_group_id):
+        """Get an endpoint group.
+
+        :param endpoint_group_id: identity of endpoint group to retrieve
+        :type endpoint_group_id: string
+        :raises: exception.NotFound
+        :returns: an endpoint group representation.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def update_endpoint_group(self, endpoint_group_id, endpoint_group):
+        """Update an endpoint group.
+
+        :param endpoint_group_id: identity of endpoint group to retrieve
+        :type endpoint_group_id: string
+        :param endpoint_group: A full or partial endpoint_group
+        :type endpoint_group: dictionary
+        :raises: exception.NotFound
+        :returns: an endpoint group representation.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def delete_endpoint_group(self, endpoint_group_id):
+        """Delete an endpoint group.
+
+        :param endpoint_group_id: identity of endpoint group to delete
+        :type endpoint_group_id: string
+        :raises: exception.NotFound
+        :returns: None.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def add_endpoint_group_to_project(self, endpoint_group_id, project_id):
+        """Adds an endpoint group to project association.
+
+        :param endpoint_group_id: identity of endpoint to associate
+        :type endpoint_group_id: string
+        :param project_id: identity of project to associate
+        :type project_id: string
+        :raises: keystone.exception.Conflict,
+        :returns: None.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def get_endpoint_group_in_project(self, endpoint_group_id, project_id):
+        """Get endpoint group to project association.
+
+        :param endpoint_group_id: identity of endpoint group to retrieve
+        :type endpoint_group_id: string
+        :param project_id: identity of project to associate
+        :type project_id: string
+        :raises: exception.NotFound
+        :returns: a project endpoint group representation.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def list_endpoint_groups(self):
+        """List all endpoint groups.
+
+        :raises: exception.NotFound
+        :returns: a list of endpoint group representations.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def list_endpoint_groups_for_project(self, project_id):
+        """List all endpoint group to project associations for a project.
+
+        :param project_id: identity of project to associate
+        :type project_id: string
+        :raises: exception.NotFound
+        :returns: a list of endpoint group to project associations.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def list_projects_associated_with_endpoint_group(self, endpoint_group_id):
+        """List all projects associated with endpoint group.
+
+        :param endpoint_group_id: identity of endpoint to associate
+        :type endpoint_group_id: string
+        :raises: exception.NotFound
+        :returns: a list of project associations for the endpoint group.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def remove_endpoint_group_from_project(self, endpoint_group_id,
+                                           project_id):
+        """Remove an endpoint to project association.
+
+        :param endpoint_group_id: identity of endpoint to associate
+        :type endpoint_group_id: string
+        :param project_id: identity of project to associate
+        :type project_id: string
+        :raises: exception.NotFound
+        :returns: None.
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def delete_endpoint_group_association_by_project(self, project_id):
+        """Remove endpoint group to project associations.
+
+        :param project_id: identity of the project to check
+        :type project_id: string
+        :returns: None
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/__init__.py
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/migrate.cfg
new file mode 100644
index 00000000..c7d34785
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/migrate.cfg
@@ -0,0 +1,25 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=endpoint_filter
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to
+# change the table name in each database too.
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the
+# commit continues, perhaps ending successfully.
+# Databases in this list MUST compile successfully during a commit, or the
+# entire commit will fail. List the databases your application will actually
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
+
+# When creating new change scripts, Migrate will stamp the new script with
+# a version number. By default this is latest_version + 1. You can set this
+# to 'true' to tell Migrate to use the UTC timestamp instead.
+use_timestamp_numbering=False
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/001_add_endpoint_filtering_table.py b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/001_add_endpoint_filtering_table.py
new file mode 100644
index 00000000..090e7f47
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/001_add_endpoint_filtering_table.py
@@ -0,0 +1,47 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
+def upgrade(migrate_engine):
+    # Upgrade operations go here. Don't create your own engine; bind
+    # migrate_engine to your metadata
+    meta = sql.MetaData()
+    meta.bind = migrate_engine
+
+    # Association table for endpoint filtering: the composite primary key
+    # (endpoint_id, project_id) ensures each endpoint/project pair is
+    # stored at most once.
+    endpoint_filtering_table = sql.Table(
+        'project_endpoint',
+        meta,
+        sql.Column(
+            'endpoint_id',
+            sql.String(64),
+            primary_key=True,
+            nullable=False),
+        sql.Column(
+            'project_id',
+            sql.String(64),
+            primary_key=True,
+            nullable=False))
+
+    # checkfirst makes the migration idempotent if the table exists.
+    endpoint_filtering_table.create(migrate_engine, checkfirst=True)
+
+
+def downgrade(migrate_engine):
+ meta = sql.MetaData()
+ meta.bind = migrate_engine
+ # Operations to reverse the above upgrade go here.
+ for table_name in ['project_endpoint']:
+ table = sql.Table(table_name, meta, autoload=True)
+ table.drop()
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/002_add_endpoint_groups.py b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/002_add_endpoint_groups.py
new file mode 100644
index 00000000..5f80160a
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/002_add_endpoint_groups.py
@@ -0,0 +1,51 @@
+# Copyright 2014 Hewlett-Packard Company
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
+def upgrade(migrate_engine):
+    # Upgrade operations go here. Don't create your own engine; bind
+    # migrate_engine to your metadata
+    meta = sql.MetaData()
+    meta.bind = migrate_engine
+
+    # Endpoint group definitions; 'filters' holds the group's filter
+    # criteria serialized as text.
+    endpoint_group_table = sql.Table(
+        'endpoint_group',
+        meta,
+        sql.Column('id', sql.String(64), primary_key=True),
+        sql.Column('name', sql.String(255), nullable=False),
+        sql.Column('description', sql.Text, nullable=True),
+        sql.Column('filters', sql.Text(), nullable=False))
+    endpoint_group_table.create(migrate_engine, checkfirst=True)
+
+    # Association table linking endpoint groups to projects; the composite
+    # primary key prevents duplicate associations, and endpoint_group_id
+    # is a foreign key into endpoint_group.
+    project_endpoint_group_table = sql.Table(
+        'project_endpoint_group',
+        meta,
+        sql.Column('endpoint_group_id', sql.String(64),
+                   sql.ForeignKey('endpoint_group.id'), nullable=False),
+        sql.Column('project_id', sql.String(64), nullable=False),
+        sql.PrimaryKeyConstraint('endpoint_group_id',
+                                 'project_id'))
+    project_endpoint_group_table.create(migrate_engine, checkfirst=True)
+
+
+def downgrade(migrate_engine):
+    meta = sql.MetaData()
+    meta.bind = migrate_engine
+    # Operations to reverse the above upgrade go here.
+    # Drop order matters: project_endpoint_group holds a foreign key into
+    # endpoint_group, so the child table must be dropped first.
+    for table_name in ['project_endpoint_group',
+                       'endpoint_group']:
+        table = sql.Table(table_name, meta, autoload=True)
+        table.drop()
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/__init__.py
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/routers.py b/keystone-moon/keystone/contrib/endpoint_filter/routers.py
new file mode 100644
index 00000000..00c8cd72
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/routers.py
@@ -0,0 +1,149 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+
+from keystone.common import json_home
+from keystone.common import wsgi
+from keystone.contrib.endpoint_filter import controllers
+
+
+# Helpers that build JSON Home relation URLs scoped to the OS-EP-FILTER
+# extension at version 1.0.
+build_resource_relation = functools.partial(
+    json_home.build_v3_extension_resource_relation,
+    extension_name='OS-EP-FILTER', extension_version='1.0')
+
+build_parameter_relation = functools.partial(
+    json_home.build_v3_extension_parameter_relation,
+    extension_name='OS-EP-FILTER', extension_version='1.0')
+
+# Relation used for every {endpoint_group_id} path parameter below.
+ENDPOINT_GROUP_PARAMETER_RELATION = build_parameter_relation(
+    parameter_name='endpoint_group_id')
+
+
+class EndpointFilterExtension(wsgi.V3ExtensionRouter):
+    """API Endpoints for the Endpoint Filter extension.
+
+    The API looks like::
+
+        PUT /OS-EP-FILTER/projects/$project_id/endpoints/$endpoint_id
+        GET /OS-EP-FILTER/projects/$project_id/endpoints/$endpoint_id
+        HEAD /OS-EP-FILTER/projects/$project_id/endpoints/$endpoint_id
+        DELETE /OS-EP-FILTER/projects/$project_id/endpoints/$endpoint_id
+        GET /OS-EP-FILTER/endpoints/$endpoint_id/projects
+        GET /OS-EP-FILTER/projects/$project_id/endpoints
+
+        GET /OS-EP-FILTER/endpoint_groups
+        POST /OS-EP-FILTER/endpoint_groups
+        GET /OS-EP-FILTER/endpoint_groups/$endpoint_group_id
+        HEAD /OS-EP-FILTER/endpoint_groups/$endpoint_group_id
+        PATCH /OS-EP-FILTER/endpoint_groups/$endpoint_group_id
+        DELETE /OS-EP-FILTER/endpoint_groups/$endpoint_group_id
+
+        GET /OS-EP-FILTER/endpoint_groups/$endpoint_group_id/projects
+        GET /OS-EP-FILTER/endpoint_groups/$endpoint_group_id/endpoints
+
+        PUT /OS-EP-FILTER/endpoint_groups/$endpoint_group/projects/$project_id
+        GET /OS-EP-FILTER/endpoint_groups/$endpoint_group/projects/$project_id
+        HEAD /OS-EP-FILTER/endpoint_groups/$endpoint_group/projects/$project_id
+        DELETE /OS-EP-FILTER/endpoint_groups/$endpoint_group/projects/
+            $project_id
+
+    """
+    # URL fragments shared by the route registrations below.
+    PATH_PREFIX = '/OS-EP-FILTER'
+    PATH_PROJECT_ENDPOINT = '/projects/{project_id}/endpoints/{endpoint_id}'
+    PATH_ENDPOINT_GROUPS = '/endpoint_groups/{endpoint_group_id}'
+    PATH_ENDPOINT_GROUP_PROJECTS = PATH_ENDPOINT_GROUPS + (
+        '/projects/{project_id}')
+
+    def add_routes(self, mapper):
+        # Wire each URL pattern above to its controller action.
+        endpoint_filter_controller = controllers.EndpointFilterV3Controller()
+        endpoint_group_controller = controllers.EndpointGroupV3Controller()
+        project_endpoint_group_controller = (
+            controllers.ProjectEndpointGroupV3Controller())
+
+        # Project <-> endpoint association routes.
+        self._add_resource(
+            mapper, endpoint_filter_controller,
+            path=self.PATH_PREFIX + '/endpoints/{endpoint_id}/projects',
+            get_action='list_projects_for_endpoint',
+            rel=build_resource_relation(resource_name='endpoint_projects'),
+            path_vars={
+                'endpoint_id': json_home.Parameters.ENDPOINT_ID,
+            })
+        self._add_resource(
+            mapper, endpoint_filter_controller,
+            path=self.PATH_PREFIX + self.PATH_PROJECT_ENDPOINT,
+            get_head_action='check_endpoint_in_project',
+            put_action='add_endpoint_to_project',
+            delete_action='remove_endpoint_from_project',
+            rel=build_resource_relation(resource_name='project_endpoint'),
+            path_vars={
+                'endpoint_id': json_home.Parameters.ENDPOINT_ID,
+                'project_id': json_home.Parameters.PROJECT_ID,
+            })
+        self._add_resource(
+            mapper, endpoint_filter_controller,
+            path=self.PATH_PREFIX + '/projects/{project_id}/endpoints',
+            get_action='list_endpoints_for_project',
+            rel=build_resource_relation(resource_name='project_endpoints'),
+            path_vars={
+                'project_id': json_home.Parameters.PROJECT_ID,
+            })
+        # Endpoint group CRUD routes.
+        self._add_resource(
+            mapper, endpoint_group_controller,
+            path=self.PATH_PREFIX + '/endpoint_groups',
+            get_action='list_endpoint_groups',
+            post_action='create_endpoint_group',
+            rel=build_resource_relation(resource_name='endpoint_groups'))
+        self._add_resource(
+            mapper, endpoint_group_controller,
+            path=self.PATH_PREFIX + self.PATH_ENDPOINT_GROUPS,
+            get_head_action='get_endpoint_group',
+            patch_action='update_endpoint_group',
+            delete_action='delete_endpoint_group',
+            rel=build_resource_relation(resource_name='endpoint_group'),
+            path_vars={
+                'endpoint_group_id': ENDPOINT_GROUP_PARAMETER_RELATION
+            })
+        # Endpoint group <-> project association routes.
+        self._add_resource(
+            mapper, project_endpoint_group_controller,
+            path=self.PATH_PREFIX + self.PATH_ENDPOINT_GROUP_PROJECTS,
+            get_head_action='get_endpoint_group_in_project',
+            put_action='add_endpoint_group_to_project',
+            delete_action='remove_endpoint_group_from_project',
+            rel=build_resource_relation(
+                resource_name='endpoint_group_to_project_association'),
+            path_vars={
+                'project_id': json_home.Parameters.PROJECT_ID,
+                'endpoint_group_id': ENDPOINT_GROUP_PARAMETER_RELATION
+            })
+        self._add_resource(
+            mapper, endpoint_group_controller,
+            path=self.PATH_PREFIX + self.PATH_ENDPOINT_GROUPS + (
+                '/projects'),
+            get_action='list_projects_associated_with_endpoint_group',
+            rel=build_resource_relation(
+                resource_name='projects_associated_with_endpoint_group'),
+            path_vars={
+                'endpoint_group_id': ENDPOINT_GROUP_PARAMETER_RELATION
+            })
+        self._add_resource(
+            mapper, endpoint_group_controller,
+            path=self.PATH_PREFIX + self.PATH_ENDPOINT_GROUPS + (
+                '/endpoints'),
+            get_action='list_endpoints_associated_with_endpoint_group',
+            rel=build_resource_relation(
+                resource_name='endpoints_in_endpoint_group'),
+            path_vars={
+                'endpoint_group_id': ENDPOINT_GROUP_PARAMETER_RELATION
+            })
diff --git a/keystone-moon/keystone/contrib/endpoint_filter/schema.py b/keystone-moon/keystone/contrib/endpoint_filter/schema.py
new file mode 100644
index 00000000..cbe54e36
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_filter/schema.py
@@ -0,0 +1,35 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import validation
+from keystone.common.validation import parameter_types
+
+
+# Property definitions shared by the create and update schemas below.
+_endpoint_group_properties = {
+    'description': validation.nullable(parameter_types.description),
+    'filters': {
+        'type': 'object'
+    },
+    'name': parameter_types.name
+}
+
+# Creation requires both 'name' and 'filters' to be supplied.
+endpoint_group_create = {
+    'type': 'object',
+    'properties': _endpoint_group_properties,
+    'required': ['name', 'filters']
+}
+
+# Updates are partial: any subset of properties, but at least one.
+endpoint_group_update = {
+    'type': 'object',
+    'properties': _endpoint_group_properties,
+    'minProperties': 1
+}
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/__init__.py b/keystone-moon/keystone/contrib/endpoint_policy/__init__.py
new file mode 100644
index 00000000..12722dc5
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.endpoint_policy.core import * # noqa
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/backends/__init__.py b/keystone-moon/keystone/contrib/endpoint_policy/backends/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/backends/__init__.py
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/backends/sql.py b/keystone-moon/keystone/contrib/endpoint_policy/backends/sql.py
new file mode 100644
index 00000000..484444f1
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/backends/sql.py
@@ -0,0 +1,140 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import sqlalchemy
+
+from keystone.common import sql
+from keystone import exception
+
+
+class PolicyAssociation(sql.ModelBase, sql.ModelDictMixin):
+ __tablename__ = 'policy_association'
+ attributes = ['policy_id', 'endpoint_id', 'region_id', 'service_id']
+ # The id column is never exposed outside this module. It only exists to
+ # provide a primary key, given that the real columns we would like to use
+ # (endpoint_id, service_id, region_id) can be null
+ id = sql.Column(sql.String(64), primary_key=True)
+ policy_id = sql.Column(sql.String(64), nullable=False)
+ endpoint_id = sql.Column(sql.String(64), nullable=True)
+ service_id = sql.Column(sql.String(64), nullable=True)
+ region_id = sql.Column(sql.String(64), nullable=True)
+ __table_args__ = (sql.UniqueConstraint('endpoint_id', 'service_id',
+ 'region_id'), {})
+
+ def to_dict(self):
+ """Returns the model's attributes as a dictionary.
+
+ We override the standard method in order to hide the id column,
+ since this only exists to provide the table with a primary key.
+
+ """
+ d = {}
+ for attr in self.__class__.attributes:
+ d[attr] = getattr(self, attr)
+ return d
+
+
+class EndpointPolicy(object):
+    """SQL backend for policy <-> endpoint/service/region associations."""
+
+    def create_policy_association(self, policy_id, endpoint_id=None,
+                                  service_id=None, region_id=None):
+        # Upsert semantics: reuse an existing row for this scope if one
+        # exists, otherwise insert a new association.
+        with sql.transaction() as session:
+            try:
+                # See if there is already a row for this association, and if
+                # so, update it with the new policy_id
+                query = session.query(PolicyAssociation)
+                query = query.filter_by(endpoint_id=endpoint_id)
+                query = query.filter_by(service_id=service_id)
+                query = query.filter_by(region_id=region_id)
+                association = query.one()
+                association.policy_id = policy_id
+            except sql.NotFound:
+                association = PolicyAssociation(id=uuid.uuid4().hex,
+                                                policy_id=policy_id,
+                                                endpoint_id=endpoint_id,
+                                                service_id=service_id,
+                                                region_id=region_id)
+                session.add(association)
+
+    def check_policy_association(self, policy_id, endpoint_id=None,
+                                 service_id=None, region_id=None):
+        # Raises PolicyAssociationNotFound when no matching row exists;
+        # returns None otherwise.
+        sql_constraints = sqlalchemy.and_(
+            PolicyAssociation.policy_id == policy_id,
+            PolicyAssociation.endpoint_id == endpoint_id,
+            PolicyAssociation.service_id == service_id,
+            PolicyAssociation.region_id == region_id)
+
+        # NOTE(henry-nash): Getting a single value to save object
+        # management overhead.
+        with sql.transaction() as session:
+            if session.query(PolicyAssociation.id).filter(
+                    sql_constraints).distinct().count() == 0:
+                raise exception.PolicyAssociationNotFound()
+
+    def delete_policy_association(self, policy_id, endpoint_id=None,
+                                  service_id=None, region_id=None):
+        # Bulk delete; silently a no-op when no matching rows exist.
+        with sql.transaction() as session:
+            query = session.query(PolicyAssociation)
+            query = query.filter_by(policy_id=policy_id)
+            query = query.filter_by(endpoint_id=endpoint_id)
+            query = query.filter_by(service_id=service_id)
+            query = query.filter_by(region_id=region_id)
+            query.delete()
+
+    def get_policy_association(self, endpoint_id=None,
+                               service_id=None, region_id=None):
+        sql_constraints = sqlalchemy.and_(
+            PolicyAssociation.endpoint_id == endpoint_id,
+            PolicyAssociation.service_id == service_id,
+            PolicyAssociation.region_id == region_id)
+
+        # NOTE(review): querying a single column means .one() returns a
+        # result row (a 1-tuple), not the bare policy_id string, so the
+        # returned dict wraps that row object — confirm callers expect
+        # this rather than {'policy_id': <string>}.
+        try:
+            with sql.transaction() as session:
+                policy_id = session.query(PolicyAssociation.policy_id).filter(
+                    sql_constraints).distinct().one()
+                return {'policy_id': policy_id}
+        except sql.NotFound:
+            raise exception.PolicyAssociationNotFound()
+
+    def list_associations_for_policy(self, policy_id):
+        # Returns a list of association dicts (see
+        # PolicyAssociation.to_dict), possibly empty.
+        with sql.transaction() as session:
+            query = session.query(PolicyAssociation)
+            query = query.filter_by(policy_id=policy_id)
+            return [ref.to_dict() for ref in query.all()]
+
+    def delete_association_by_endpoint(self, endpoint_id):
+        # Purge every association that references the given endpoint.
+        with sql.transaction() as session:
+            query = session.query(PolicyAssociation)
+            query = query.filter_by(endpoint_id=endpoint_id)
+            query.delete()
+
+    def delete_association_by_service(self, service_id):
+        # Purge every association that references the given service.
+        with sql.transaction() as session:
+            query = session.query(PolicyAssociation)
+            query = query.filter_by(service_id=service_id)
+            query.delete()
+
+    def delete_association_by_region(self, region_id):
+        # Purge every association that references the given region.
+        with sql.transaction() as session:
+            query = session.query(PolicyAssociation)
+            query = query.filter_by(region_id=region_id)
+            query.delete()
+
+    def delete_association_by_policy(self, policy_id):
+        # Purge every association that references the given policy.
+        with sql.transaction() as session:
+            query = session.query(PolicyAssociation)
+            query = query.filter_by(policy_id=policy_id)
+            query.delete()
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/controllers.py b/keystone-moon/keystone/contrib/endpoint_policy/controllers.py
new file mode 100644
index 00000000..b96834dc
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/controllers.py
@@ -0,0 +1,166 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import controller
+from keystone.common import dependency
+from keystone import notifications
+
+
+@dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api')
+class EndpointPolicyV3Controller(controller.V3Controller):
+    """V3 controller for associations between policies and endpoints.
+
+    An association may target an endpoint, a service, or a service within
+    a region. On construction the controller subscribes to deletion
+    notifications so that associations referencing a deleted endpoint,
+    service, region or policy are cleaned up.
+    """
+
+    collection_name = 'endpoints'
+    member_name = 'endpoint'
+
+    def __init__(self):
+        super(EndpointPolicyV3Controller, self).__init__()
+        # Remove dangling associations whenever a referenced resource
+        # is deleted.
+        notifications.register_event_callback(
+            'deleted', 'endpoint', self._on_endpoint_delete)
+        notifications.register_event_callback(
+            'deleted', 'service', self._on_service_delete)
+        notifications.register_event_callback(
+            'deleted', 'region', self._on_region_delete)
+        notifications.register_event_callback(
+            'deleted', 'policy', self._on_policy_delete)
+
+    def _on_endpoint_delete(self, service, resource_type, operation, payload):
+        # payload['resource_info'] is passed as the deleted endpoint's ID.
+        self.endpoint_policy_api.delete_association_by_endpoint(
+            payload['resource_info'])
+
+    def _on_service_delete(self, service, resource_type, operation, payload):
+        self.endpoint_policy_api.delete_association_by_service(
+            payload['resource_info'])
+
+    def _on_region_delete(self, service, resource_type, operation, payload):
+        self.endpoint_policy_api.delete_association_by_region(
+            payload['resource_info'])
+
+    def _on_policy_delete(self, service, resource_type, operation, payload):
+        self.endpoint_policy_api.delete_association_by_policy(
+            payload['resource_info'])
+
+    @controller.protected()
+    def create_policy_association_for_endpoint(self, context,
+                                               policy_id, endpoint_id):
+        """Create an association between a policy and an endpoint."""
+        # Validate both resources exist before creating the association.
+        self.policy_api.get_policy(policy_id)
+        self.catalog_api.get_endpoint(endpoint_id)
+        self.endpoint_policy_api.create_policy_association(
+            policy_id, endpoint_id=endpoint_id)
+
+    @controller.protected()
+    def check_policy_association_for_endpoint(self, context,
+                                              policy_id, endpoint_id):
+        """Check an association between a policy and an endpoint."""
+        self.policy_api.get_policy(policy_id)
+        self.catalog_api.get_endpoint(endpoint_id)
+        self.endpoint_policy_api.check_policy_association(
+            policy_id, endpoint_id=endpoint_id)
+
+    @controller.protected()
+    def delete_policy_association_for_endpoint(self, context,
+                                               policy_id, endpoint_id):
+        """Delete an association between a policy and an endpoint."""
+        self.policy_api.get_policy(policy_id)
+        self.catalog_api.get_endpoint(endpoint_id)
+        self.endpoint_policy_api.delete_policy_association(
+            policy_id, endpoint_id=endpoint_id)
+
+    @controller.protected()
+    def create_policy_association_for_service(self, context,
+                                              policy_id, service_id):
+        """Create an association between a policy and a service."""
+        self.policy_api.get_policy(policy_id)
+        self.catalog_api.get_service(service_id)
+        self.endpoint_policy_api.create_policy_association(
+            policy_id, service_id=service_id)
+
+    @controller.protected()
+    def check_policy_association_for_service(self, context,
+                                             policy_id, service_id):
+        """Check an association between a policy and a service."""
+        self.policy_api.get_policy(policy_id)
+        self.catalog_api.get_service(service_id)
+        self.endpoint_policy_api.check_policy_association(
+            policy_id, service_id=service_id)
+
+    @controller.protected()
+    def delete_policy_association_for_service(self, context,
+                                              policy_id, service_id):
+        """Delete an association between a policy and a service."""
+        self.policy_api.get_policy(policy_id)
+        self.catalog_api.get_service(service_id)
+        self.endpoint_policy_api.delete_policy_association(
+            policy_id, service_id=service_id)
+
+    @controller.protected()
+    def create_policy_association_for_region_and_service(
+            self, context, policy_id, service_id, region_id):
+        """Create an association between a policy and region+service."""
+        self.policy_api.get_policy(policy_id)
+        self.catalog_api.get_service(service_id)
+        self.catalog_api.get_region(region_id)
+        self.endpoint_policy_api.create_policy_association(
+            policy_id, service_id=service_id, region_id=region_id)
+
+    @controller.protected()
+    def check_policy_association_for_region_and_service(
+            self, context, policy_id, service_id, region_id):
+        """Check an association between a policy and region+service."""
+        self.policy_api.get_policy(policy_id)
+        self.catalog_api.get_service(service_id)
+        self.catalog_api.get_region(region_id)
+        self.endpoint_policy_api.check_policy_association(
+            policy_id, service_id=service_id, region_id=region_id)
+
+    @controller.protected()
+    def delete_policy_association_for_region_and_service(
+            self, context, policy_id, service_id, region_id):
+        """Delete an association between a policy and region+service."""
+        self.policy_api.get_policy(policy_id)
+        self.catalog_api.get_service(service_id)
+        self.catalog_api.get_region(region_id)
+        self.endpoint_policy_api.delete_policy_association(
+            policy_id, service_id=service_id, region_id=region_id)
+
+    @controller.protected()
+    def get_policy_for_endpoint(self, context, endpoint_id):
+        """Get the effective policy for an endpoint."""
+        self.catalog_api.get_endpoint(endpoint_id)
+        ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id)
+        # NOTE(henry-nash): since the collection and member for this class is
+        # set to endpoints, we have to handle wrapping this policy entity
+        # ourselves.
+        self._add_self_referential_link(context, ref)
+        return {'policy': ref}
+
+    # NOTE(henry-nash): As in the catalog controller, we must ensure that the
+    # legacy_endpoint_id does not escape.
+
+    @classmethod
+    def filter_endpoint(cls, ref):
+        if 'legacy_endpoint_id' in ref:
+            ref.pop('legacy_endpoint_id')
+        return ref
+
+    @classmethod
+    def wrap_member(cls, context, ref):
+        ref = cls.filter_endpoint(ref)
+        return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref)
+
+    @controller.protected()
+    def list_endpoints_for_policy(self, context, policy_id):
+        """List endpoints with the effective association to a policy."""
+        self.policy_api.get_policy(policy_id)
+        refs = self.endpoint_policy_api.list_endpoints_for_policy(policy_id)
+        return EndpointPolicyV3Controller.wrap_collection(context, refs)
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/core.py b/keystone-moon/keystone/contrib/endpoint_policy/core.py
new file mode 100644
index 00000000..1aa03267
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/core.py
@@ -0,0 +1,430 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import abc
+
+from oslo_config import cfg
+from oslo_log import log
+import six
+
+from keystone.common import dependency
+from keystone.common import manager
+from keystone import exception
+from keystone.i18n import _, _LE, _LW
+
+CONF = cfg.CONF
+LOG = log.getLogger(__name__)
+
+
+@dependency.provider('endpoint_policy_api')
+@dependency.requires('catalog_api', 'policy_api')
+class Manager(manager.Manager):
+ """Default pivot point for the Endpoint Policy backend.
+
+ See :mod:`keystone.common.manager.Manager` for more details on how this
+ dynamically calls the backend.
+
+ """
+
+ def __init__(self):
+ super(Manager, self).__init__(CONF.endpoint_policy.driver)
+
+ def _assert_valid_association(self, endpoint_id, service_id, region_id):
+ """Assert that the association is supported.
+
+ There are three types of association supported:
+
+ - Endpoint (in which case service and region must be None)
+ - Service and region (in which endpoint must be None)
+ - Service (in which case endpoint and region must be None)
+
+ """
+ if (endpoint_id is not None and
+ service_id is None and region_id is None):
+ return
+ if (service_id is not None and region_id is not None and
+ endpoint_id is None):
+ return
+ if (service_id is not None and
+ endpoint_id is None and region_id is None):
+ return
+
+ raise exception.InvalidPolicyAssociation(endpoint_id=endpoint_id,
+ service_id=service_id,
+ region_id=region_id)
+
+ def create_policy_association(self, policy_id, endpoint_id=None,
+ service_id=None, region_id=None):
+ self._assert_valid_association(endpoint_id, service_id, region_id)
+ self.driver.create_policy_association(policy_id, endpoint_id,
+ service_id, region_id)
+
+ def check_policy_association(self, policy_id, endpoint_id=None,
+ service_id=None, region_id=None):
+ self._assert_valid_association(endpoint_id, service_id, region_id)
+ self.driver.check_policy_association(policy_id, endpoint_id,
+ service_id, region_id)
+
+ def delete_policy_association(self, policy_id, endpoint_id=None,
+ service_id=None, region_id=None):
+ self._assert_valid_association(endpoint_id, service_id, region_id)
+ self.driver.delete_policy_association(policy_id, endpoint_id,
+ service_id, region_id)
+
+ def list_endpoints_for_policy(self, policy_id):
+
+ def _get_endpoint(endpoint_id, policy_id):
+ try:
+ return self.catalog_api.get_endpoint(endpoint_id)
+ except exception.EndpointNotFound:
+ msg = _LW('Endpoint %(endpoint_id)s referenced in '
+ 'association for policy %(policy_id)s not found.')
+ LOG.warning(msg, {'policy_id': policy_id,
+ 'endpoint_id': endpoint_id})
+ raise
+
+ def _get_endpoints_for_service(service_id, endpoints):
+ # TODO(henry-nash): Consider optimizing this in the future by
+ # adding an explicit list_endpoints_for_service to the catalog API.
+ return [ep for ep in endpoints if ep['service_id'] == service_id]
+
+ def _get_endpoints_for_service_and_region(
+ service_id, region_id, endpoints, regions):
+ # TODO(henry-nash): Consider optimizing this in the future.
+ # The lack of a two-way pointer in the region tree structure
+ # makes this somewhat inefficient.
+
+ def _recursively_get_endpoints_for_region(
+ region_id, service_id, endpoint_list, region_list,
+ endpoints_found, regions_examined):
+ """Recursively search down a region tree for endpoints.
+
+ :param region_id: the point in the tree to examine
+ :param service_id: the service we are interested in
+ :param endpoint_list: list of all endpoints
+ :param region_list: list of all regions
+ :param endpoints_found: list of matching endpoints found so
+ far - which will be updated if more are
+ found in this iteration
+ :param regions_examined: list of regions we have already looked
+ at - used to spot illegal circular
+ references in the tree to avoid never
+ completing search
+ :returns: list of endpoints that match
+
+ """
+
+ if region_id in regions_examined:
+ msg = _LE('Circular reference or a repeated entry found '
+ 'in region tree - %(region_id)s.')
+ LOG.error(msg, {'region_id': ref.region_id})
+ return
+
+ regions_examined.append(region_id)
+ endpoints_found += (
+ [ep for ep in endpoint_list if
+ ep['service_id'] == service_id and
+ ep['region_id'] == region_id])
+
+ for region in region_list:
+ if region['parent_region_id'] == region_id:
+ _recursively_get_endpoints_for_region(
+ region['id'], service_id, endpoints, regions,
+ endpoints_found, regions_examined)
+
+ endpoints_found = []
+ regions_examined = []
+
+ # Now walk down the region tree
+ _recursively_get_endpoints_for_region(
+ region_id, service_id, endpoints, regions,
+ endpoints_found, regions_examined)
+
+ return endpoints_found
+
+ matching_endpoints = []
+ endpoints = self.catalog_api.list_endpoints()
+ regions = self.catalog_api.list_regions()
+ for ref in self.driver.list_associations_for_policy(policy_id):
+ if ref.get('endpoint_id') is not None:
+ matching_endpoints.append(
+ _get_endpoint(ref['endpoint_id'], policy_id))
+ continue
+
+ if (ref.get('service_id') is not None and
+ ref.get('region_id') is None):
+ matching_endpoints += _get_endpoints_for_service(
+ ref['service_id'], endpoints)
+ continue
+
+ if (ref.get('service_id') is not None and
+ ref.get('region_id') is not None):
+ matching_endpoints += (
+ _get_endpoints_for_service_and_region(
+ ref['service_id'], ref['region_id'],
+ endpoints, regions))
+ continue
+
+ msg = _LW('Unsupported policy association found - '
+ 'Policy %(policy_id)s, Endpoint %(endpoint_id)s, '
+ 'Service %(service_id)s, Region %(region_id)s, ')
+ LOG.warning(msg, {'policy_id': policy_id,
+ 'endpoint_id': ref['endpoint_id'],
+ 'service_id': ref['service_id'],
+ 'region_id': ref['region_id']})
+
+ return matching_endpoints
+
+ def get_policy_for_endpoint(self, endpoint_id):
+
+ def _get_policy(policy_id, endpoint_id):
+ try:
+ return self.policy_api.get_policy(policy_id)
+ except exception.PolicyNotFound:
+ msg = _LW('Policy %(policy_id)s referenced in association '
+ 'for endpoint %(endpoint_id)s not found.')
+ LOG.warning(msg, {'policy_id': policy_id,
+ 'endpoint_id': endpoint_id})
+ raise
+
+ def _look_for_policy_for_region_and_service(endpoint):
+ """Look in the region and its parents for a policy.
+
+ Examine the region of the endpoint for a policy appropriate for
+ the service of the endpoint. If there isn't a match, then chase up
+ the region tree to find one.
+
+ """
+ region_id = endpoint['region_id']
+ regions_examined = []
+ while region_id is not None:
+ try:
+ ref = self.driver.get_policy_association(
+ service_id=endpoint['service_id'],
+ region_id=region_id)
+ return ref['policy_id']
+ except exception.PolicyAssociationNotFound:
+ pass
+
+ # There wasn't one for that region & service, let's
+ # chase up the region tree
+ regions_examined.append(region_id)
+ region = self.catalog_api.get_region(region_id)
+ region_id = None
+ if region.get('parent_region_id') is not None:
+ region_id = region['parent_region_id']
+ if region_id in regions_examined:
+ msg = _LE('Circular reference or a repeated entry '
+ 'found in region tree - %(region_id)s.')
+ LOG.error(msg, {'region_id': region_id})
+ break
+
+ # First let's see if there is a policy explicitly defined for
+ # this endpoint.
+
+ try:
+ ref = self.driver.get_policy_association(endpoint_id=endpoint_id)
+ return _get_policy(ref['policy_id'], endpoint_id)
+ except exception.PolicyAssociationNotFound:
+ pass
+
+ # There wasn't a policy explicitly defined for this endpoint, so
+ # now let's see if there is one for the Region & Service.
+
+ endpoint = self.catalog_api.get_endpoint(endpoint_id)
+ policy_id = _look_for_policy_for_region_and_service(endpoint)
+ if policy_id is not None:
+ return _get_policy(policy_id, endpoint_id)
+
+ # Finally, just check if there is one for the service.
+ try:
+ ref = self.driver.get_policy_association(
+ service_id=endpoint['service_id'])
+ return _get_policy(ref['policy_id'], endpoint_id)
+ except exception.PolicyAssociationNotFound:
+ pass
+
+ msg = _('No policy is associated with endpoint '
+ '%(endpoint_id)s.') % {'endpoint_id': endpoint_id}
+ raise exception.NotFound(msg)
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Driver(object):
+    """Interface description for an Endpoint Policy driver."""
+
+    @abc.abstractmethod
+    def create_policy_association(self, policy_id, endpoint_id=None,
+                                  service_id=None, region_id=None):
+        """Creates a policy association.
+
+        :param policy_id: identity of policy that is being associated
+        :type policy_id: string
+        :param endpoint_id: identity of endpoint to associate
+        :type endpoint_id: string
+        :param service_id: identity of the service to associate
+        :type service_id: string
+        :param region_id: identity of the region to associate
+        :type region_id: string
+        :returns: None
+
+        There are three types of association permitted:
+
+        - Endpoint (in which case service and region must be None)
+        - Service and region (in which case endpoint must be None)
+        - Service (in which case endpoint and region must be None)
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def check_policy_association(self, policy_id, endpoint_id=None,
+                                 service_id=None, region_id=None):
+        """Checks the existence of a policy association.
+
+        :param policy_id: identity of policy that is being associated
+        :type policy_id: string
+        :param endpoint_id: identity of endpoint to associate
+        :type endpoint_id: string
+        :param service_id: identity of the service to associate
+        :type service_id: string
+        :param region_id: identity of the region to associate
+        :type region_id: string
+        :raises: keystone.exception.PolicyAssociationNotFound if there is no
+                 match for the specified association
+        :returns: None
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def delete_policy_association(self, policy_id, endpoint_id=None,
+                                  service_id=None, region_id=None):
+        """Deletes a policy association.
+
+        :param policy_id: identity of policy that is being associated
+        :type policy_id: string
+        :param endpoint_id: identity of endpoint to associate
+        :type endpoint_id: string
+        :param service_id: identity of the service to associate
+        :type service_id: string
+        :param region_id: identity of the region to associate
+        :type region_id: string
+        :returns: None
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def get_policy_association(self, endpoint_id=None,
+                               service_id=None, region_id=None):
+        """Gets the policy for an explicit association.
+
+        This method is not exposed as a public API, but is used by
+        get_policy_for_endpoint().
+
+        :param endpoint_id: identity of endpoint
+        :type endpoint_id: string
+        :param service_id: identity of the service
+        :type service_id: string
+        :param region_id: identity of the region
+        :type region_id: string
+        :raises: keystone.exception.PolicyAssociationNotFound if there is no
+                 match for the specified association
+        :returns: dict containing policy_id
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def list_associations_for_policy(self, policy_id):
+        """List the associations for a policy.
+
+        This method is not exposed as a public API, but is used by
+        list_endpoints_for_policy().
+
+        :param policy_id: identity of policy
+        :type policy_id: string
+        :returns: List of association dicts
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def list_endpoints_for_policy(self, policy_id):
+        """List all the endpoints using a given policy.
+
+        :param policy_id: identity of policy that is being associated
+        :type policy_id: string
+        :returns: list of endpoints that have an effective association with
+                  that policy
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def get_policy_for_endpoint(self, endpoint_id):
+        """Get the appropriate policy for a given endpoint.
+
+        :param endpoint_id: identity of endpoint
+        :type endpoint_id: string
+        :returns: Policy entity for the endpoint
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def delete_association_by_endpoint(self, endpoint_id):
+        """Removes all the policy associations with the specific endpoint.
+
+        :param endpoint_id: identity of the endpoint whose associations
+                            are to be removed
+        :type endpoint_id: string
+        :returns: None
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def delete_association_by_service(self, service_id):
+        """Removes all the policy associations with the specific service.
+
+        :param service_id: identity of the service whose associations
+                           are to be removed
+        :type service_id: string
+        :returns: None
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def delete_association_by_region(self, region_id):
+        """Removes all the policy associations with the specific region.
+
+        :param region_id: identity of the region whose associations
+                          are to be removed
+        :type region_id: string
+        :returns: None
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
+    @abc.abstractmethod
+    def delete_association_by_policy(self, policy_id):
+        """Removes all the policy associations with the specific policy.
+
+        :param policy_id: identity of the policy whose associations
+                          are to be removed
+        :type policy_id: string
+        :returns: None
+
+        """
+        raise exception.NotImplemented()  # pragma: no cover
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/__init__.py
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/migrate.cfg
new file mode 100644
index 00000000..62895d6f
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/migrate.cfg
@@ -0,0 +1,25 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=endpoint_policy
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to
+# change the table name in each database too.
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the
+# commit continues, perhaps ending successfully.
+# Databases in this list MUST compile successfully during a commit, or the
+# entire commit will fail. List the databases your application will actually
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
+
+# When creating new change scripts, Migrate will stamp the new script with
+# a version number. By default this is latest_version + 1. You can set this
+# to 'true' to tell Migrate to use the UTC timestamp instead.
+use_timestamp_numbering=False
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/001_add_endpoint_policy_table.py b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/001_add_endpoint_policy_table.py
new file mode 100644
index 00000000..c77e4380
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/001_add_endpoint_policy_table.py
@@ -0,0 +1,48 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
+def upgrade(migrate_engine):
+    """Create the policy_association table for the endpoint_policy repo."""
+    # Upgrade operations go here. Don't create your own engine; bind
+    # migrate_engine to your metadata
+    meta = sql.MetaData()
+    meta.bind = migrate_engine
+
+    endpoint_policy_table = sql.Table(
+        'policy_association',
+        meta,
+        sql.Column('id', sql.String(64), primary_key=True),
+        sql.Column('policy_id', sql.String(64),
+                   nullable=False),
+        # Exactly one of endpoint_id, or service_id (optionally with
+        # region_id), is expected to be set per row; the rest stay NULL.
+        sql.Column('endpoint_id', sql.String(64),
+                   nullable=True),
+        sql.Column('service_id', sql.String(64),
+                   nullable=True),
+        sql.Column('region_id', sql.String(64),
+                   nullable=True),
+        # NOTE(review): uniqueness of rows containing NULLs is
+        # backend-dependent - confirm behavior on the target databases.
+        sql.UniqueConstraint('endpoint_id', 'service_id', 'region_id'),
+        mysql_engine='InnoDB',
+        mysql_charset='utf8')
+
+    endpoint_policy_table.create(migrate_engine, checkfirst=True)
+
+
+def downgrade(migrate_engine):
+    """Drop the policy_association table created by upgrade()."""
+    meta = sql.MetaData()
+    meta.bind = migrate_engine
+    # Operations to reverse the above upgrade go here.
+    table = sql.Table('policy_association', meta, autoload=True)
+    table.drop()
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/__init__.py
diff --git a/keystone-moon/keystone/contrib/endpoint_policy/routers.py b/keystone-moon/keystone/contrib/endpoint_policy/routers.py
new file mode 100644
index 00000000..999d1eed
--- /dev/null
+++ b/keystone-moon/keystone/contrib/endpoint_policy/routers.py
@@ -0,0 +1,85 @@
+# Copyright 2014 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+
+from keystone.common import json_home
+from keystone.common import wsgi
+from keystone.contrib.endpoint_policy import controllers
+
+
+# JSON Home relation builder pre-bound to the OS-ENDPOINT-POLICY extension.
+build_resource_relation = functools.partial(
+    json_home.build_v3_extension_resource_relation,
+    extension_name='OS-ENDPOINT-POLICY', extension_version='1.0')
+
+
+class EndpointPolicyExtension(wsgi.V3ExtensionRouter):
+    """Routes for the OS-ENDPOINT-POLICY extension.
+
+    Maps endpoint-policy, policy-endpoint, policy-service and
+    policy-service-region association URLs onto the actions of
+    EndpointPolicyV3Controller.
+    """
+
+    PATH_PREFIX = '/OS-ENDPOINT-POLICY'
+
+    def add_routes(self, mapper):
+        endpoint_policy_controller = controllers.EndpointPolicyV3Controller()
+
+        self._add_resource(
+            mapper, endpoint_policy_controller,
+            path='/endpoints/{endpoint_id}' + self.PATH_PREFIX + '/policy',
+            get_head_action='get_policy_for_endpoint',
+            rel=build_resource_relation(resource_name='endpoint_policy'),
+            path_vars={'endpoint_id': json_home.Parameters.ENDPOINT_ID})
+        self._add_resource(
+            mapper, endpoint_policy_controller,
+            path='/policies/{policy_id}' + self.PATH_PREFIX + '/endpoints',
+            get_action='list_endpoints_for_policy',
+            rel=build_resource_relation(resource_name='policy_endpoints'),
+            path_vars={'policy_id': json_home.Parameters.POLICY_ID})
+        self._add_resource(
+            mapper, endpoint_policy_controller,
+            path=('/policies/{policy_id}' + self.PATH_PREFIX +
+                  '/endpoints/{endpoint_id}'),
+            get_head_action='check_policy_association_for_endpoint',
+            put_action='create_policy_association_for_endpoint',
+            delete_action='delete_policy_association_for_endpoint',
+            rel=build_resource_relation(
+                resource_name='endpoint_policy_association'),
+            path_vars={
+                'policy_id': json_home.Parameters.POLICY_ID,
+                'endpoint_id': json_home.Parameters.ENDPOINT_ID,
+            })
+        self._add_resource(
+            mapper, endpoint_policy_controller,
+            path=('/policies/{policy_id}' + self.PATH_PREFIX +
+                  '/services/{service_id}'),
+            get_head_action='check_policy_association_for_service',
+            put_action='create_policy_association_for_service',
+            delete_action='delete_policy_association_for_service',
+            rel=build_resource_relation(
+                resource_name='service_policy_association'),
+            path_vars={
+                'policy_id': json_home.Parameters.POLICY_ID,
+                'service_id': json_home.Parameters.SERVICE_ID,
+            })
+        self._add_resource(
+            mapper, endpoint_policy_controller,
+            path=('/policies/{policy_id}' + self.PATH_PREFIX +
+                  '/services/{service_id}/regions/{region_id}'),
+            get_head_action='check_policy_association_for_region_and_service',
+            put_action='create_policy_association_for_region_and_service',
+            delete_action='delete_policy_association_for_region_and_service',
+            rel=build_resource_relation(
+                resource_name='region_and_service_policy_association'),
+            path_vars={
+                'policy_id': json_home.Parameters.POLICY_ID,
+                'service_id': json_home.Parameters.SERVICE_ID,
+                'region_id': json_home.Parameters.REGION_ID,
+            })
diff --git a/keystone-moon/keystone/contrib/example/__init__.py b/keystone-moon/keystone/contrib/example/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/example/__init__.py
diff --git a/keystone-moon/keystone/contrib/example/configuration.rst b/keystone-moon/keystone/contrib/example/configuration.rst
new file mode 100644
index 00000000..979d3457
--- /dev/null
+++ b/keystone-moon/keystone/contrib/example/configuration.rst
@@ -0,0 +1,31 @@
+..
+  Copyright 2013 OpenStack Foundation
+ All Rights Reserved.
+
+ Licensed under the Apache License, Version 2.0 (the "License"); you may
+ not use this file except in compliance with the License. You may obtain
+ a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ License for the specific language governing permissions and limitations
+ under the License.
+
+=================
+Extension Example
+=================
+
+Please describe here in details how to enable your extension:
+
+1. Add the required fields and values in the ``[example]`` section
+ in ``keystone.conf``.
+
+2. Optional: add the required ``filter`` to the ``pipeline`` in ``keystone-paste.ini``
+
+3. Optional: create the extension tables if using the provided sql backend. Example::
+
+
+ ./bin/keystone-manage db_sync --extension example \ No newline at end of file
diff --git a/keystone-moon/keystone/contrib/example/controllers.py b/keystone-moon/keystone/contrib/example/controllers.py
new file mode 100644
index 00000000..95b3e82f
--- /dev/null
+++ b/keystone-moon/keystone/contrib/example/controllers.py
@@ -0,0 +1,26 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+from keystone.common import controller
+from keystone.common import dependency
+
+
+@dependency.requires('example_api')
+class ExampleV3Controller(controller.V3Controller):
+    """Example V3 controller that delegates to the example_api manager."""
+
+    @controller.protected()
+    def example_get(self, context):
+        """Description of the controller logic."""
+        self.example_api.do_something(context)
diff --git a/keystone-moon/keystone/contrib/example/core.py b/keystone-moon/keystone/contrib/example/core.py
new file mode 100644
index 00000000..6e85c7f7
--- /dev/null
+++ b/keystone-moon/keystone/contrib/example/core.py
@@ -0,0 +1,92 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_log import log
+
+from keystone.common import dependency
+from keystone.common import manager
+from keystone import exception
+from keystone.i18n import _LI
+from keystone import notifications
+
+
+LOG = log.getLogger(__name__)
+
+
+@dependency.provider('example_api')
+class ExampleManager(manager.Manager):
+ """Example Manager.
+
+ See :mod:`keystone.common.manager.Manager` for more details on
+ how this dynamically calls the backend.
+
+ """
+
+ def __init__(self):
+ # The following is an example of event callbacks. In this setup,
+        # ExampleManager's data model depends on the project's data model.
+ # It must create additional aggregates when a new project is created,
+ # and it must cleanup data related to the project whenever a project
+ # has been deleted.
+ #
+ # In this example, the project_deleted_callback will be invoked
+ # whenever a project has been deleted. Similarly, the
+ # project_created_callback will be invoked whenever a new project is
+ # created.
+
+ # This information is used when the @dependency.provider decorator acts
+ # on the class.
+ self.event_callbacks = {
+ notifications.ACTIONS.deleted: {
+ 'project': [self.project_deleted_callback],
+ },
+ notifications.ACTIONS.created: {
+ 'project': [self.project_created_callback],
+ },
+ }
+ super(ExampleManager, self).__init__(
+ 'keystone.contrib.example.core.ExampleDriver')
+
+ def project_deleted_callback(self, service, resource_type, operation,
+ payload):
+ # The code below is merely an example.
+ msg = _LI('Received the following notification: service %(service)s, '
+ 'resource_type: %(resource_type)s, operation %(operation)s '
+ 'payload %(payload)s')
+ LOG.info(msg, {'service': service, 'resource_type': resource_type,
+ 'operation': operation, 'payload': payload})
+
+ def project_created_callback(self, service, resource_type, operation,
+ payload):
+ # The code below is merely an example.
+ msg = _LI('Received the following notification: service %(service)s, '
+ 'resource_type: %(resource_type)s, operation %(operation)s '
+ 'payload %(payload)s')
+ LOG.info(msg, {'service': service, 'resource_type': resource_type,
+ 'operation': operation, 'payload': payload})
+
+
+class ExampleDriver(object):
+ """Interface description for Example driver."""
+
+ def do_something(self, data):
+ """Do something
+
+ :param data: example data
+ :type data: string
+        :raises: keystone.exception.NotImplemented
+ :returns: None.
+
+ """
+ raise exception.NotImplemented()
diff --git a/keystone-moon/keystone/contrib/example/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/example/migrate_repo/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/example/migrate_repo/__init__.py
diff --git a/keystone-moon/keystone/contrib/example/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/example/migrate_repo/migrate.cfg
new file mode 100644
index 00000000..5b1b1c0a
--- /dev/null
+++ b/keystone-moon/keystone/contrib/example/migrate_repo/migrate.cfg
@@ -0,0 +1,25 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=example
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to
+# change the table name in each database too.
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the
+# commit continues, perhaps ending successfully.
+# Databases in this list MUST compile successfully during a commit, or the
+# entire commit will fail. List the databases your application will actually
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
+
+# When creating new change scripts, Migrate will stamp the new script with
+# a version number. By default this is latest_version + 1. You can set this
+# to 'true' to tell Migrate to use the UTC timestamp instead.
+use_timestamp_numbering=False
diff --git a/keystone-moon/keystone/contrib/example/migrate_repo/versions/001_example_table.py b/keystone-moon/keystone/contrib/example/migrate_repo/versions/001_example_table.py
new file mode 100644
index 00000000..10b7ccc7
--- /dev/null
+++ b/keystone-moon/keystone/contrib/example/migrate_repo/versions/001_example_table.py
@@ -0,0 +1,43 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
+def upgrade(migrate_engine):
+ # Upgrade operations go here. Don't create your own engine; bind
+ # migrate_engine to your metadata
+ meta = sql.MetaData()
+ meta.bind = migrate_engine
+
+ # catalog
+
+ service_table = sql.Table(
+ 'example',
+ meta,
+ sql.Column('id', sql.String(64), primary_key=True),
+ sql.Column('type', sql.String(255)),
+ sql.Column('extra', sql.Text()))
+ service_table.create(migrate_engine, checkfirst=True)
+
+
+def downgrade(migrate_engine):
+ # Operations to reverse the above upgrade go here.
+ meta = sql.MetaData()
+ meta.bind = migrate_engine
+
+ tables = ['example']
+ for t in tables:
+ table = sql.Table(t, meta, autoload=True)
+ table.drop(migrate_engine, checkfirst=True)
diff --git a/keystone-moon/keystone/contrib/example/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/example/migrate_repo/versions/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/example/migrate_repo/versions/__init__.py
diff --git a/keystone-moon/keystone/contrib/example/routers.py b/keystone-moon/keystone/contrib/example/routers.py
new file mode 100644
index 00000000..30cffe1b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/example/routers.py
@@ -0,0 +1,38 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+
+from keystone.common import json_home
+from keystone.common import wsgi
+from keystone.contrib.example import controllers
+
+
+build_resource_relation = functools.partial(
+ json_home.build_v3_extension_resource_relation,
+ extension_name='OS-EXAMPLE', extension_version='1.0')
+
+
+class ExampleRouter(wsgi.V3ExtensionRouter):
+
+ PATH_PREFIX = '/OS-EXAMPLE'
+
+ def add_routes(self, mapper):
+ example_controller = controllers.ExampleV3Controller()
+
+ self._add_resource(
+ mapper, example_controller,
+ path=self.PATH_PREFIX + '/example',
+ get_action='do_something',
+ rel=build_resource_relation(resource_name='example'))
diff --git a/keystone-moon/keystone/contrib/federation/__init__.py b/keystone-moon/keystone/contrib/federation/__init__.py
new file mode 100644
index 00000000..57c9e42c
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.federation.core import * # noqa
diff --git a/keystone-moon/keystone/contrib/federation/backends/__init__.py b/keystone-moon/keystone/contrib/federation/backends/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/backends/__init__.py
diff --git a/keystone-moon/keystone/contrib/federation/backends/sql.py b/keystone-moon/keystone/contrib/federation/backends/sql.py
new file mode 100644
index 00000000..f2c124d0
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/backends/sql.py
@@ -0,0 +1,315 @@
+# Copyright 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_serialization import jsonutils
+
+from keystone.common import sql
+from keystone.contrib.federation import core
+from keystone import exception
+
+
+class FederationProtocolModel(sql.ModelBase, sql.DictBase):
+ __tablename__ = 'federation_protocol'
+ attributes = ['id', 'idp_id', 'mapping_id']
+ mutable_attributes = frozenset(['mapping_id'])
+
+ id = sql.Column(sql.String(64), primary_key=True)
+ idp_id = sql.Column(sql.String(64), sql.ForeignKey('identity_provider.id',
+ ondelete='CASCADE'), primary_key=True)
+ mapping_id = sql.Column(sql.String(64), nullable=False)
+
+ @classmethod
+ def from_dict(cls, dictionary):
+ new_dictionary = dictionary.copy()
+ return cls(**new_dictionary)
+
+ def to_dict(self):
+ """Return a dictionary with model's attributes."""
+ d = dict()
+ for attr in self.__class__.attributes:
+ d[attr] = getattr(self, attr)
+ return d
+
+
+class IdentityProviderModel(sql.ModelBase, sql.DictBase):
+ __tablename__ = 'identity_provider'
+ attributes = ['id', 'remote_id', 'enabled', 'description']
+ mutable_attributes = frozenset(['description', 'enabled', 'remote_id'])
+
+ id = sql.Column(sql.String(64), primary_key=True)
+ remote_id = sql.Column(sql.String(256), nullable=True)
+ enabled = sql.Column(sql.Boolean, nullable=False)
+ description = sql.Column(sql.Text(), nullable=True)
+
+ @classmethod
+ def from_dict(cls, dictionary):
+ new_dictionary = dictionary.copy()
+ return cls(**new_dictionary)
+
+ def to_dict(self):
+ """Return a dictionary with model's attributes."""
+ d = dict()
+ for attr in self.__class__.attributes:
+ d[attr] = getattr(self, attr)
+ return d
+
+
+class MappingModel(sql.ModelBase, sql.DictBase):
+ __tablename__ = 'mapping'
+ attributes = ['id', 'rules']
+
+ id = sql.Column(sql.String(64), primary_key=True)
+ rules = sql.Column(sql.JsonBlob(), nullable=False)
+
+ @classmethod
+ def from_dict(cls, dictionary):
+ new_dictionary = dictionary.copy()
+ return cls(**new_dictionary)
+
+ def to_dict(self):
+ """Return a dictionary with model's attributes."""
+ d = dict()
+ for attr in self.__class__.attributes:
+ d[attr] = getattr(self, attr)
+ return d
+
+
+class ServiceProviderModel(sql.ModelBase, sql.DictBase):
+ __tablename__ = 'service_provider'
+ attributes = ['auth_url', 'id', 'enabled', 'description', 'sp_url']
+ mutable_attributes = frozenset(['auth_url', 'description', 'enabled',
+ 'sp_url'])
+
+ id = sql.Column(sql.String(64), primary_key=True)
+ enabled = sql.Column(sql.Boolean, nullable=False)
+ description = sql.Column(sql.Text(), nullable=True)
+ auth_url = sql.Column(sql.String(256), nullable=False)
+ sp_url = sql.Column(sql.String(256), nullable=False)
+
+ @classmethod
+ def from_dict(cls, dictionary):
+ new_dictionary = dictionary.copy()
+ return cls(**new_dictionary)
+
+ def to_dict(self):
+ """Return a dictionary with model's attributes."""
+ d = dict()
+ for attr in self.__class__.attributes:
+ d[attr] = getattr(self, attr)
+ return d
+
+
+class Federation(core.Driver):
+
+ # Identity Provider CRUD
+ @sql.handle_conflicts(conflict_type='identity_provider')
+ def create_idp(self, idp_id, idp):
+ idp['id'] = idp_id
+ with sql.transaction() as session:
+ idp_ref = IdentityProviderModel.from_dict(idp)
+ session.add(idp_ref)
+ return idp_ref.to_dict()
+
+ def delete_idp(self, idp_id):
+ with sql.transaction() as session:
+ idp_ref = self._get_idp(session, idp_id)
+ session.delete(idp_ref)
+
+ def _get_idp(self, session, idp_id):
+ idp_ref = session.query(IdentityProviderModel).get(idp_id)
+ if not idp_ref:
+ raise exception.IdentityProviderNotFound(idp_id=idp_id)
+ return idp_ref
+
+ def _get_idp_from_remote_id(self, session, remote_id):
+ q = session.query(IdentityProviderModel)
+ q = q.filter_by(remote_id=remote_id)
+ try:
+ return q.one()
+ except sql.NotFound:
+ raise exception.IdentityProviderNotFound(idp_id=remote_id)
+
+ def list_idps(self):
+ with sql.transaction() as session:
+ idps = session.query(IdentityProviderModel)
+ idps_list = [idp.to_dict() for idp in idps]
+ return idps_list
+
+ def get_idp(self, idp_id):
+ with sql.transaction() as session:
+ idp_ref = self._get_idp(session, idp_id)
+ return idp_ref.to_dict()
+
+ def get_idp_from_remote_id(self, remote_id):
+ with sql.transaction() as session:
+ idp_ref = self._get_idp_from_remote_id(session, remote_id)
+ return idp_ref.to_dict()
+
+ def update_idp(self, idp_id, idp):
+ with sql.transaction() as session:
+ idp_ref = self._get_idp(session, idp_id)
+ old_idp = idp_ref.to_dict()
+ old_idp.update(idp)
+ new_idp = IdentityProviderModel.from_dict(old_idp)
+ for attr in IdentityProviderModel.mutable_attributes:
+ setattr(idp_ref, attr, getattr(new_idp, attr))
+ return idp_ref.to_dict()
+
+ # Protocol CRUD
+ def _get_protocol(self, session, idp_id, protocol_id):
+ q = session.query(FederationProtocolModel)
+ q = q.filter_by(id=protocol_id, idp_id=idp_id)
+ try:
+ return q.one()
+ except sql.NotFound:
+ kwargs = {'protocol_id': protocol_id,
+ 'idp_id': idp_id}
+ raise exception.FederatedProtocolNotFound(**kwargs)
+
+ @sql.handle_conflicts(conflict_type='federation_protocol')
+ def create_protocol(self, idp_id, protocol_id, protocol):
+ protocol['id'] = protocol_id
+ protocol['idp_id'] = idp_id
+ with sql.transaction() as session:
+ self._get_idp(session, idp_id)
+ protocol_ref = FederationProtocolModel.from_dict(protocol)
+ session.add(protocol_ref)
+ return protocol_ref.to_dict()
+
+ def update_protocol(self, idp_id, protocol_id, protocol):
+ with sql.transaction() as session:
+ proto_ref = self._get_protocol(session, idp_id, protocol_id)
+ old_proto = proto_ref.to_dict()
+ old_proto.update(protocol)
+ new_proto = FederationProtocolModel.from_dict(old_proto)
+ for attr in FederationProtocolModel.mutable_attributes:
+ setattr(proto_ref, attr, getattr(new_proto, attr))
+ return proto_ref.to_dict()
+
+ def get_protocol(self, idp_id, protocol_id):
+ with sql.transaction() as session:
+ protocol_ref = self._get_protocol(session, idp_id, protocol_id)
+ return protocol_ref.to_dict()
+
+ def list_protocols(self, idp_id):
+ with sql.transaction() as session:
+ q = session.query(FederationProtocolModel)
+ q = q.filter_by(idp_id=idp_id)
+ protocols = [protocol.to_dict() for protocol in q]
+ return protocols
+
+ def delete_protocol(self, idp_id, protocol_id):
+ with sql.transaction() as session:
+ key_ref = self._get_protocol(session, idp_id, protocol_id)
+ session.delete(key_ref)
+
+ # Mapping CRUD
+ def _get_mapping(self, session, mapping_id):
+ mapping_ref = session.query(MappingModel).get(mapping_id)
+ if not mapping_ref:
+ raise exception.MappingNotFound(mapping_id=mapping_id)
+ return mapping_ref
+
+ @sql.handle_conflicts(conflict_type='mapping')
+ def create_mapping(self, mapping_id, mapping):
+ ref = {}
+ ref['id'] = mapping_id
+ ref['rules'] = jsonutils.dumps(mapping.get('rules'))
+ with sql.transaction() as session:
+ mapping_ref = MappingModel.from_dict(ref)
+ session.add(mapping_ref)
+ return mapping_ref.to_dict()
+
+ def delete_mapping(self, mapping_id):
+ with sql.transaction() as session:
+ mapping_ref = self._get_mapping(session, mapping_id)
+ session.delete(mapping_ref)
+
+ def list_mappings(self):
+ with sql.transaction() as session:
+ mappings = session.query(MappingModel)
+ return [x.to_dict() for x in mappings]
+
+ def get_mapping(self, mapping_id):
+ with sql.transaction() as session:
+ mapping_ref = self._get_mapping(session, mapping_id)
+ return mapping_ref.to_dict()
+
+ @sql.handle_conflicts(conflict_type='mapping')
+ def update_mapping(self, mapping_id, mapping):
+ ref = {}
+ ref['id'] = mapping_id
+ ref['rules'] = jsonutils.dumps(mapping.get('rules'))
+ with sql.transaction() as session:
+ mapping_ref = self._get_mapping(session, mapping_id)
+ old_mapping = mapping_ref.to_dict()
+ old_mapping.update(ref)
+ new_mapping = MappingModel.from_dict(old_mapping)
+ for attr in MappingModel.attributes:
+ setattr(mapping_ref, attr, getattr(new_mapping, attr))
+ return mapping_ref.to_dict()
+
+ def get_mapping_from_idp_and_protocol(self, idp_id, protocol_id):
+ with sql.transaction() as session:
+ protocol_ref = self._get_protocol(session, idp_id, protocol_id)
+ mapping_id = protocol_ref.mapping_id
+ mapping_ref = self._get_mapping(session, mapping_id)
+ return mapping_ref.to_dict()
+
+ # Service Provider CRUD
+ @sql.handle_conflicts(conflict_type='service_provider')
+ def create_sp(self, sp_id, sp):
+ sp['id'] = sp_id
+ with sql.transaction() as session:
+ sp_ref = ServiceProviderModel.from_dict(sp)
+ session.add(sp_ref)
+ return sp_ref.to_dict()
+
+ def delete_sp(self, sp_id):
+ with sql.transaction() as session:
+ sp_ref = self._get_sp(session, sp_id)
+ session.delete(sp_ref)
+
+ def _get_sp(self, session, sp_id):
+ sp_ref = session.query(ServiceProviderModel).get(sp_id)
+ if not sp_ref:
+ raise exception.ServiceProviderNotFound(sp_id=sp_id)
+ return sp_ref
+
+ def list_sps(self):
+ with sql.transaction() as session:
+ sps = session.query(ServiceProviderModel)
+ sps_list = [sp.to_dict() for sp in sps]
+ return sps_list
+
+ def get_sp(self, sp_id):
+ with sql.transaction() as session:
+ sp_ref = self._get_sp(session, sp_id)
+ return sp_ref.to_dict()
+
+ def update_sp(self, sp_id, sp):
+ with sql.transaction() as session:
+ sp_ref = self._get_sp(session, sp_id)
+ old_sp = sp_ref.to_dict()
+ old_sp.update(sp)
+ new_sp = ServiceProviderModel.from_dict(old_sp)
+ for attr in ServiceProviderModel.mutable_attributes:
+ setattr(sp_ref, attr, getattr(new_sp, attr))
+ return sp_ref.to_dict()
+
+ def get_enabled_service_providers(self):
+ with sql.transaction() as session:
+ service_providers = session.query(ServiceProviderModel)
+ service_providers = service_providers.filter_by(enabled=True)
+ return service_providers
diff --git a/keystone-moon/keystone/contrib/federation/controllers.py b/keystone-moon/keystone/contrib/federation/controllers.py
new file mode 100644
index 00000000..6066a33f
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/controllers.py
@@ -0,0 +1,457 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Extensions supporting Federation."""
+
+import string
+
+from oslo_config import cfg
+from oslo_log import log
+import six
+from six.moves import urllib
+import webob
+
+from keystone.auth import controllers as auth_controllers
+from keystone.common import authorization
+from keystone.common import controller
+from keystone.common import dependency
+from keystone.common import validation
+from keystone.common import wsgi
+from keystone.contrib.federation import idp as keystone_idp
+from keystone.contrib.federation import schema
+from keystone.contrib.federation import utils
+from keystone import exception
+from keystone.i18n import _
+from keystone.models import token_model
+
+
+CONF = cfg.CONF
+LOG = log.getLogger(__name__)
+
+
+class _ControllerBase(controller.V3Controller):
+ """Base behaviors for federation controllers."""
+
+ @classmethod
+ def base_url(cls, context, path=None):
+ """Construct a path and pass it to V3Controller.base_url method."""
+
+ path = '/OS-FEDERATION/' + cls.collection_name
+ return super(_ControllerBase, cls).base_url(context, path=path)
+
+
+@dependency.requires('federation_api')
+class IdentityProvider(_ControllerBase):
+ """Identity Provider representation."""
+ collection_name = 'identity_providers'
+ member_name = 'identity_provider'
+
+ _mutable_parameters = frozenset(['description', 'enabled', 'remote_id'])
+ _public_parameters = frozenset(['id', 'enabled', 'description',
+ 'remote_id', 'links'
+ ])
+
+ @classmethod
+ def _add_related_links(cls, context, ref):
+ """Add URLs for entities related with Identity Provider.
+
+ Add URLs pointing to:
+ - protocols tied to the Identity Provider
+
+ """
+ ref.setdefault('links', {})
+ base_path = ref['links'].get('self')
+ if base_path is None:
+ base_path = '/'.join([IdentityProvider.base_url(context),
+ ref['id']])
+ for name in ['protocols']:
+ ref['links'][name] = '/'.join([base_path, name])
+
+ @classmethod
+ def _add_self_referential_link(cls, context, ref):
+ id = ref.get('id')
+ self_path = '/'.join([cls.base_url(context), id])
+ ref.setdefault('links', {})
+ ref['links']['self'] = self_path
+
+ @classmethod
+ def wrap_member(cls, context, ref):
+ cls._add_self_referential_link(context, ref)
+ cls._add_related_links(context, ref)
+ ref = cls.filter_params(ref)
+ return {cls.member_name: ref}
+
+ @controller.protected()
+ def create_identity_provider(self, context, idp_id, identity_provider):
+ identity_provider = self._normalize_dict(identity_provider)
+ identity_provider.setdefault('enabled', False)
+ IdentityProvider.check_immutable_params(identity_provider)
+ idp_ref = self.federation_api.create_idp(idp_id, identity_provider)
+ response = IdentityProvider.wrap_member(context, idp_ref)
+ return wsgi.render_response(body=response, status=('201', 'Created'))
+
+ @controller.protected()
+ def list_identity_providers(self, context):
+ ref = self.federation_api.list_idps()
+ ref = [self.filter_params(x) for x in ref]
+ return IdentityProvider.wrap_collection(context, ref)
+
+ @controller.protected()
+ def get_identity_provider(self, context, idp_id):
+ ref = self.federation_api.get_idp(idp_id)
+ return IdentityProvider.wrap_member(context, ref)
+
+ @controller.protected()
+ def delete_identity_provider(self, context, idp_id):
+ self.federation_api.delete_idp(idp_id)
+
+ @controller.protected()
+ def update_identity_provider(self, context, idp_id, identity_provider):
+ identity_provider = self._normalize_dict(identity_provider)
+ IdentityProvider.check_immutable_params(identity_provider)
+ idp_ref = self.federation_api.update_idp(idp_id, identity_provider)
+ return IdentityProvider.wrap_member(context, idp_ref)
+
+
+@dependency.requires('federation_api')
+class FederationProtocol(_ControllerBase):
+ """A federation protocol representation.
+
+ See IdentityProvider docstring for explanation on _mutable_parameters
+ and _public_parameters class attributes.
+
+ """
+ collection_name = 'protocols'
+ member_name = 'protocol'
+
+ _public_parameters = frozenset(['id', 'mapping_id', 'links'])
+ _mutable_parameters = frozenset(['mapping_id'])
+
+ @classmethod
+ def _add_self_referential_link(cls, context, ref):
+ """Add 'links' entry to the response dictionary.
+
+        Calls the IdentityProvider.base_url() class method, as it constructs
+        the proper URL with the 'identity providers' part included.
+
+ :param ref: response dictionary
+
+ """
+ ref.setdefault('links', {})
+ base_path = ref['links'].get('identity_provider')
+ if base_path is None:
+ base_path = [IdentityProvider.base_url(context), ref['idp_id']]
+ base_path = '/'.join(base_path)
+ self_path = [base_path, 'protocols', ref['id']]
+ self_path = '/'.join(self_path)
+ ref['links']['self'] = self_path
+
+ @classmethod
+ def _add_related_links(cls, context, ref):
+ """Add new entries to the 'links' subdictionary in the response.
+
+ Adds 'identity_provider' key with URL pointing to related identity
+ provider as a value.
+
+ :param ref: response dictionary
+
+ """
+ ref.setdefault('links', {})
+ base_path = '/'.join([IdentityProvider.base_url(context),
+ ref['idp_id']])
+ ref['links']['identity_provider'] = base_path
+
+ @classmethod
+ def wrap_member(cls, context, ref):
+ cls._add_related_links(context, ref)
+ cls._add_self_referential_link(context, ref)
+ ref = cls.filter_params(ref)
+ return {cls.member_name: ref}
+
+ @controller.protected()
+ def create_protocol(self, context, idp_id, protocol_id, protocol):
+ ref = self._normalize_dict(protocol)
+ FederationProtocol.check_immutable_params(ref)
+ ref = self.federation_api.create_protocol(idp_id, protocol_id, ref)
+ response = FederationProtocol.wrap_member(context, ref)
+ return wsgi.render_response(body=response, status=('201', 'Created'))
+
+ @controller.protected()
+ def update_protocol(self, context, idp_id, protocol_id, protocol):
+ ref = self._normalize_dict(protocol)
+ FederationProtocol.check_immutable_params(ref)
+ ref = self.federation_api.update_protocol(idp_id, protocol_id,
+ protocol)
+ return FederationProtocol.wrap_member(context, ref)
+
+ @controller.protected()
+ def get_protocol(self, context, idp_id, protocol_id):
+ ref = self.federation_api.get_protocol(idp_id, protocol_id)
+ return FederationProtocol.wrap_member(context, ref)
+
+ @controller.protected()
+ def list_protocols(self, context, idp_id):
+ protocols_ref = self.federation_api.list_protocols(idp_id)
+ protocols = list(protocols_ref)
+ return FederationProtocol.wrap_collection(context, protocols)
+
+ @controller.protected()
+ def delete_protocol(self, context, idp_id, protocol_id):
+ self.federation_api.delete_protocol(idp_id, protocol_id)
+
+
+@dependency.requires('federation_api')
+class MappingController(_ControllerBase):
+ collection_name = 'mappings'
+ member_name = 'mapping'
+
+ @controller.protected()
+ def create_mapping(self, context, mapping_id, mapping):
+ ref = self._normalize_dict(mapping)
+ utils.validate_mapping_structure(ref)
+ mapping_ref = self.federation_api.create_mapping(mapping_id, ref)
+ response = MappingController.wrap_member(context, mapping_ref)
+ return wsgi.render_response(body=response, status=('201', 'Created'))
+
+ @controller.protected()
+ def list_mappings(self, context):
+ ref = self.federation_api.list_mappings()
+ return MappingController.wrap_collection(context, ref)
+
+ @controller.protected()
+ def get_mapping(self, context, mapping_id):
+ ref = self.federation_api.get_mapping(mapping_id)
+ return MappingController.wrap_member(context, ref)
+
+ @controller.protected()
+ def delete_mapping(self, context, mapping_id):
+ self.federation_api.delete_mapping(mapping_id)
+
+ @controller.protected()
+ def update_mapping(self, context, mapping_id, mapping):
+ mapping = self._normalize_dict(mapping)
+ utils.validate_mapping_structure(mapping)
+ mapping_ref = self.federation_api.update_mapping(mapping_id, mapping)
+ return MappingController.wrap_member(context, mapping_ref)
+
+
+@dependency.requires('federation_api')
+class Auth(auth_controllers.Auth):
+
+ def federated_authentication(self, context, identity_provider, protocol):
+ """Authenticate from dedicated url endpoint.
+
+ Build HTTP request body for federated authentication and inject
+ it into the ``authenticate_for_token`` function.
+
+ """
+ auth = {
+ 'identity': {
+ 'methods': [protocol],
+ protocol: {
+ 'identity_provider': identity_provider,
+ 'protocol': protocol
+ }
+ }
+ }
+
+ return self.authenticate_for_token(context, auth=auth)
+
+ def federated_sso_auth(self, context, protocol_id):
+ try:
+ remote_id_name = CONF.federation.remote_id_attribute
+ remote_id = context['environment'][remote_id_name]
+ except KeyError:
+ msg = _('Missing entity ID from environment')
+ LOG.error(msg)
+ raise exception.Unauthorized(msg)
+
+ if 'origin' in context['query_string']:
+ origin = context['query_string'].get('origin')
+ host = urllib.parse.unquote_plus(origin)
+ else:
+ msg = _('Request must have an origin query parameter')
+ LOG.error(msg)
+ raise exception.ValidationError(msg)
+
+ if host in CONF.federation.trusted_dashboard:
+ ref = self.federation_api.get_idp_from_remote_id(remote_id)
+ identity_provider = ref['id']
+ res = self.federated_authentication(context, identity_provider,
+ protocol_id)
+ token_id = res.headers['X-Subject-Token']
+ return self.render_html_response(host, token_id)
+ else:
+ msg = _('%(host)s is not a trusted dashboard host')
+ msg = msg % {'host': host}
+ LOG.error(msg)
+ raise exception.Unauthorized(msg)
+
+ def render_html_response(self, host, token_id):
+ """Forms an HTML Form from a template with autosubmit."""
+
+ headers = [('Content-Type', 'text/html')]
+
+ with open(CONF.federation.sso_callback_template) as template:
+ src = string.Template(template.read())
+
+ subs = {'host': host, 'token': token_id}
+ body = src.substitute(subs)
+ return webob.Response(body=body, status='200',
+ headerlist=headers)
+
+ @validation.validated(schema.saml_create, 'auth')
+ def create_saml_assertion(self, context, auth):
+ """Exchange a scoped token for a SAML assertion.
+
+ :param auth: Dictionary that contains a token and service provider id
+ :returns: SAML Assertion based on properties from the token
+ """
+
+ issuer = CONF.saml.idp_entity_id
+ sp_id = auth['scope']['service_provider']['id']
+ service_provider = self.federation_api.get_sp(sp_id)
+ utils.assert_enabled_service_provider_object(service_provider)
+
+ sp_url = service_provider.get('sp_url')
+ auth_url = service_provider.get('auth_url')
+
+ token_id = auth['identity']['token']['id']
+ token_data = self.token_provider_api.validate_token(token_id)
+ token_ref = token_model.KeystoneToken(token_id, token_data)
+ subject = token_ref.user_name
+ roles = token_ref.role_names
+
+ if not token_ref.project_scoped:
+ action = _('Use a project scoped token when attempting to create '
+ 'a SAML assertion')
+ raise exception.ForbiddenAction(action=action)
+
+ project = token_ref.project_name
+ generator = keystone_idp.SAMLGenerator()
+ response = generator.samlize_token(issuer, sp_url, subject, roles,
+ project)
+
+ return wsgi.render_response(body=response.to_string(),
+ status=('200', 'OK'),
+ headers=[('Content-Type', 'text/xml'),
+ ('X-sp-url',
+ six.binary_type(sp_url)),
+ ('X-auth-url',
+ six.binary_type(auth_url))])
+
+
@dependency.requires('assignment_api', 'resource_api')
class DomainV3(controller.V3Controller):
    collection_name = 'domains'
    member_name = 'domain'

    def __init__(self):
        super(DomainV3, self).__init__()
        self.get_member_from_driver = self.resource_api.get_domain

    @controller.protected()
    def list_domains_for_groups(self, context):
        """List all domains available to an authenticated user's groups.

        :param context: request context
        :returns: list of accessible domains

        """
        env = context['environment']
        group_ids = env[authorization.AUTH_CONTEXT_ENV]['group_ids']
        domains = self.assignment_api.list_domains_for_groups(group_ids)
        return DomainV3.wrap_collection(context, domains)
+
+
@dependency.requires('assignment_api', 'resource_api')
class ProjectAssignmentV3(controller.V3Controller):
    collection_name = 'projects'
    member_name = 'project'

    def __init__(self):
        super(ProjectAssignmentV3, self).__init__()
        self.get_member_from_driver = self.resource_api.get_project

    @controller.protected()
    def list_projects_for_groups(self, context):
        """List all projects available to an authenticated user's groups.

        :param context: request context
        :returns: list of accessible projects

        """
        env = context['environment']
        group_ids = env[authorization.AUTH_CONTEXT_ENV]['group_ids']
        projects = self.assignment_api.list_projects_for_groups(group_ids)
        return ProjectAssignmentV3.wrap_collection(context, projects)
+
+
@dependency.requires('federation_api')
class ServiceProvider(_ControllerBase):
    """Service Provider representation."""

    collection_name = 'service_providers'
    member_name = 'service_provider'

    _mutable_parameters = frozenset(['auth_url', 'description', 'enabled',
                                     'sp_url'])
    _public_parameters = frozenset(['auth_url', 'id', 'enabled', 'description',
                                    'links', 'sp_url'])

    @controller.protected()
    @validation.validated(schema.service_provider_create, 'service_provider')
    def create_service_provider(self, context, sp_id, service_provider):
        """Register a service provider; disabled unless stated otherwise."""
        sp = self._normalize_dict(service_provider)
        sp.setdefault('enabled', False)
        ServiceProvider.check_immutable_params(sp)
        sp_ref = self.federation_api.create_sp(sp_id, sp)
        response = ServiceProvider.wrap_member(context, sp_ref)
        return wsgi.render_response(body=response, status=('201', 'Created'))

    @controller.protected()
    def list_service_providers(self, context):
        """List all registered service providers."""
        refs = [self.filter_params(sp)
                for sp in self.federation_api.list_sps()]
        return ServiceProvider.wrap_collection(context, refs)

    @controller.protected()
    def get_service_provider(self, context, sp_id):
        """Return a single service provider by id."""
        ref = self.federation_api.get_sp(sp_id)
        return ServiceProvider.wrap_member(context, ref)

    @controller.protected()
    def delete_service_provider(self, context, sp_id):
        """Remove a service provider."""
        self.federation_api.delete_sp(sp_id)

    @controller.protected()
    @validation.validated(schema.service_provider_update, 'service_provider')
    def update_service_provider(self, context, sp_id, service_provider):
        """Update the mutable attributes of a service provider."""
        sp = self._normalize_dict(service_provider)
        ServiceProvider.check_immutable_params(sp)
        sp_ref = self.federation_api.update_sp(sp_id, sp)
        return ServiceProvider.wrap_member(context, sp_ref)
+
+
class SAMLMetadataV3(_ControllerBase):
    member_name = 'metadata'

    def get_metadata(self, context):
        """Serve the IdP metadata document as an XML response."""
        path = CONF.saml.idp_metadata_path
        try:
            with open(path, 'r') as handle:
                body = handle.read()
        except IOError as e:
            # An unreadable metadata file surfaces as an HTTP 500.
            raise exception.MetadataFileError(reason=e)
        return wsgi.render_response(body=body, status=('200', 'OK'),
                                    headers=[('Content-Type', 'text/xml')])
diff --git a/keystone-moon/keystone/contrib/federation/core.py b/keystone-moon/keystone/contrib/federation/core.py
new file mode 100644
index 00000000..b596cff7
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/core.py
@@ -0,0 +1,346 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Extension supporting Federation."""
+
+import abc
+
+from oslo_config import cfg
+from oslo_log import log as logging
+import six
+
+from keystone.common import dependency
+from keystone.common import extension
+from keystone.common import manager
+from keystone import exception
+
+
+CONF = cfg.CONF
+LOG = logging.getLogger(__name__)
# Extension descriptor advertised through Keystone's extension discovery
# API; registered for both the admin and public pipelines below.
EXTENSION_DATA = {
    'name': 'OpenStack Federation APIs',
    'namespace': 'http://docs.openstack.org/identity/api/ext/'
                 'OS-FEDERATION/v1.0',
    'alias': 'OS-FEDERATION',
    'updated': '2013-12-17T12:00:0-00:00',
    'description': 'OpenStack Identity Providers Mechanism.',
    'links': [{
        'rel': 'describedby',
        'type': 'text/html',
        'href': 'https://github.com/openstack/identity-api'
    }]}
extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)

# Keys used to mark federation-specific data in tokens and API payloads.
FEDERATION = 'OS-FEDERATION'
IDENTITY_PROVIDER = 'OS-FEDERATION:identity_provider'
PROTOCOL = 'OS-FEDERATION:protocol'
FEDERATED_DOMAIN_KEYWORD = 'Federated'
+
+
@dependency.provider('federation_api')
class Manager(manager.Manager):
    """Default pivot point for the Federation backend.

    See :mod:`keystone.common.manager.Manager` for more details on how this
    dynamically calls the backend.

    """
    def __init__(self):
        super(Manager, self).__init__(CONF.federation.driver)

    def get_enabled_service_providers(self):
        """List enabled service providers for the Service Catalog.

        Each returned entry carries exactly three attributes: ``id``,
        ``auth_url`` and ``sp_url``, where:

        - id is an unique, user defined identifier for service provider object
        - auth_url is a authentication URL of remote Keystone
        - sp_url a URL accessible at the remote service provider where SAML
          assertion is transmitted.

        :returns: list of dictionaries with enabled service providers
        :rtype: list of dicts

        """
        providers = self.driver.get_enabled_service_providers()
        return [{'auth_url': sp.auth_url,
                 'id': sp.id,
                 'sp_url': sp.sp_url}
                for sp in providers]
+
+
@six.add_metaclass(abc.ABCMeta)
class Driver(object):
    """Interface that federation backend drivers must implement.

    Covers CRUD for identity providers, their protocols, attribute mappings
    and service providers. All abstract methods raise ``NotImplemented``
    until a concrete backend overrides them.
    """

    @abc.abstractmethod
    def create_idp(self, idp_id, idp):
        """Create an identity provider.

        :returns: idp_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def delete_idp(self, idp_id):
        """Delete an identity provider.

        :raises: keystone.exception.IdentityProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def list_idps(self):
        """List all identity providers.

        :raises: keystone.exception.IdentityProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_idp(self, idp_id):
        """Get an identity provider by ID.

        :raises: keystone.exception.IdentityProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_idp_from_remote_id(self, remote_id):
        """Get an identity provider by remote ID.

        :raises: keystone.exception.IdentityProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def update_idp(self, idp_id, idp):
        """Update an identity provider by ID.

        :raises: keystone.exception.IdentityProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def create_protocol(self, idp_id, protocol_id, protocol):
        """Add an IdP-Protocol configuration.

        :raises: keystone.exception.IdentityProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def update_protocol(self, idp_id, protocol_id, protocol):
        """Change an IdP-Protocol configuration.

        :raises: keystone.exception.IdentityProviderNotFound,
            keystone.exception.FederatedProtocolNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_protocol(self, idp_id, protocol_id):
        """Get an IdP-Protocol configuration.

        :raises: keystone.exception.IdentityProviderNotFound,
            keystone.exception.FederatedProtocolNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def list_protocols(self, idp_id):
        """List an IdP's supported protocols.

        :raises: keystone.exception.IdentityProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def delete_protocol(self, idp_id, protocol_id):
        """Delete an IdP-Protocol configuration.

        :raises: keystone.exception.IdentityProviderNotFound,
            keystone.exception.FederatedProtocolNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def create_mapping(self, mapping_ref):
        """Create a mapping.

        :param mapping_ref: mapping ref with mapping name
        :type mapping_ref: dict
        :returns: mapping_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def delete_mapping(self, mapping_id):
        """Delete a mapping.

        :param mapping_id: id of mapping to delete
        :type mapping_id: string
        :returns: None

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def update_mapping(self, mapping_id, mapping_ref):
        """Update a mapping.

        :param mapping_id: id of mapping to update
        :type mapping_id: string
        :param mapping_ref: new mapping ref
        :type mapping_ref: dict
        :returns: mapping_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def list_mappings(self):
        """List all mappings.

        :returns: list of mappings

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_mapping(self, mapping_id):
        """Get a mapping based on mapping_id.

        :param mapping_id: id of mapping to get
        :type mapping_id: string
        :returns: mapping_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_mapping_from_idp_and_protocol(self, idp_id, protocol_id):
        """Get mapping based on idp_id and protocol_id.

        :param idp_id: id of the identity provider
        :type idp_id: string
        :param protocol_id: id of the protocol
        :type protocol_id: string
        :raises: keystone.exception.IdentityProviderNotFound,
            keystone.exception.FederatedProtocolNotFound
        :returns: mapping_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def create_sp(self, sp_id, sp):
        """Create a service provider.

        :param sp_id: id of the service provider
        :type sp_id: string
        :param sp: service provider object
        :type sp: dict

        :returns: sp_ref
        :rtype: dict

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def delete_sp(self, sp_id):
        """Delete a service provider.

        :param sp_id: id of the service provider
        :type sp_id: string

        :raises: keystone.exception.ServiceProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def list_sps(self):
        """List all service providers.

        :returns: List of sp_ref objects
        :rtype: list of dicts

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_sp(self, sp_id):
        """Get a service provider.

        :param sp_id: id of the service provider
        :type sp_id: string

        :returns: sp_ref
        :raises: keystone.exception.ServiceProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def update_sp(self, sp_id, sp):
        """Update a service provider.

        :param sp_id: id of the service provider
        :type sp_id: string
        :param sp: service provider object
        :type sp: dict

        :returns: sp_ref
        :rtype: dict

        :raises: keystone.exception.ServiceProviderNotFound

        """
        raise exception.NotImplemented()  # pragma: no cover

    # NOTE: intentionally not abstract so existing drivers without service
    # provider support still load; the Manager wraps this call.
    def get_enabled_service_providers(self):
        """List enabled service providers for Service Catalog

        Service Provider in a catalog contains three attributes: ``id``,
        ``auth_url``, ``sp_url``, where:

        - id is an unique, user defined identifier for service provider object
        - auth_url is a authentication URL of remote Keystone
        - sp_url a URL accessible at the remote service provider where SAML
          assertion is transmitted.

        :returns: list of dictionaries with enabled service providers
        :rtype: list of dicts

        """
        raise exception.NotImplemented()  # pragma: no cover
diff --git a/keystone-moon/keystone/contrib/federation/idp.py b/keystone-moon/keystone/contrib/federation/idp.py
new file mode 100644
index 00000000..bf400135
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/idp.py
@@ -0,0 +1,558 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import os
+import subprocess
+import uuid
+
+from oslo_config import cfg
+from oslo_log import log
+from oslo_utils import timeutils
+import saml2
+from saml2 import md
+from saml2 import saml
+from saml2 import samlp
+from saml2 import sigver
+import xmldsig
+
+from keystone import exception
+from keystone.i18n import _, _LE
+from keystone.openstack.common import fileutils
+
+
+LOG = log.getLogger(__name__)
+CONF = cfg.CONF
+
+
class SAMLGenerator(object):
    """A class to generate SAML assertions."""

    def __init__(self):
        # NOTE(review): the assertion ID is generated once per generator
        # instance, so reusing one SAMLGenerator would emit assertions with
        # duplicate IDs -- callers appear to create a fresh instance per
        # request; confirm before caching instances.
        self.assertion_id = uuid.uuid4().hex

    def samlize_token(self, issuer, recipient, user, roles, project,
                      expires_in=None):
        """Convert Keystone attributes to a SAML assertion.

        :param issuer: URL of the issuing party
        :type issuer: string
        :param recipient: URL of the recipient
        :type recipient: string
        :param user: User name
        :type user: string
        :param roles: List of role names
        :type roles: list
        :param project: Project name
        :type project: string
        :param expires_in: Sets how long the assertion is valid for, in seconds
        :type expires_in: int

        :return: XML <Response> object

        """
        expiration_time = self._determine_expiration_time(expires_in)
        status = self._create_status()
        saml_issuer = self._create_issuer(issuer)
        subject = self._create_subject(user, expiration_time, recipient)
        attribute_statement = self._create_attribute_statement(user, roles,
                                                               project)
        authn_statement = self._create_authn_statement(issuer, expiration_time)
        signature = self._create_signature()

        assertion = self._create_assertion(saml_issuer, signature,
                                           subject, authn_statement,
                                           attribute_statement)

        # Sign out-of-process with the xmlsec1 binary; see _sign_assertion.
        assertion = _sign_assertion(assertion)

        response = self._create_response(saml_issuer, status, assertion,
                                         recipient)
        return response

    def _determine_expiration_time(self, expires_in):
        # Fall back to the configured assertion lifetime when the caller
        # does not supply one; returned as an ISO 8601 timestamp string.
        if expires_in is None:
            expires_in = CONF.saml.assertion_expiration_time
        now = timeutils.utcnow()
        future = now + datetime.timedelta(seconds=expires_in)
        return timeutils.isotime(future, subsecond=True)

    def _create_status(self):
        """Create an object that represents a SAML Status.

        <ns0:Status xmlns:ns0="urn:oasis:names:tc:SAML:2.0:protocol">
            <ns0:StatusCode
              Value="urn:oasis:names:tc:SAML:2.0:status:Success" />
        </ns0:Status>

        :return: XML <Status> object

        """
        status = samlp.Status()
        status_code = samlp.StatusCode()
        status_code.value = samlp.STATUS_SUCCESS
        status_code.set_text('')
        status.status_code = status_code
        return status

    def _create_issuer(self, issuer_url):
        """Create an object that represents a SAML Issuer.

        <ns0:Issuer
          xmlns:ns0="urn:oasis:names:tc:SAML:2.0:assertion"
          Format="urn:oasis:names:tc:SAML:2.0:nameid-format:entity">
          https://acme.com/FIM/sps/openstack/saml20</ns0:Issuer>

        :return: XML <Issuer> object

        """
        issuer = saml.Issuer()
        issuer.format = saml.NAMEID_FORMAT_ENTITY
        issuer.set_text(issuer_url)
        return issuer

    def _create_subject(self, user, expiration_time, recipient):
        """Create an object that represents a SAML Subject.

        <ns0:Subject>
            <ns0:NameID>
                john@smith.com</ns0:NameID>
            <ns0:SubjectConfirmation
              Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
                <ns0:SubjectConfirmationData
                  NotOnOrAfter="2014-08-19T11:53:57.243106Z"
                  Recipient="http://beta.com/Shibboleth.sso/SAML2/POST" />
            </ns0:SubjectConfirmation>
        </ns0:Subject>

        :return: XML <Subject> object

        """
        name_id = saml.NameID()
        name_id.set_text(user)
        subject_conf_data = saml.SubjectConfirmationData()
        subject_conf_data.recipient = recipient
        subject_conf_data.not_on_or_after = expiration_time
        subject_conf = saml.SubjectConfirmation()
        subject_conf.method = saml.SCM_BEARER
        subject_conf.subject_confirmation_data = subject_conf_data
        subject = saml.Subject()
        subject.subject_confirmation = subject_conf
        subject.name_id = name_id
        return subject

    def _create_attribute_statement(self, user, roles, project):
        """Create an object that represents a SAML AttributeStatement.

        <ns0:AttributeStatement
          xmlns:ns0="urn:oasis:names:tc:SAML:2.0:assertion"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
            <ns0:Attribute Name="openstack_user">
                <ns0:AttributeValue
                  xsi:type="xs:string">test_user</ns0:AttributeValue>
            </ns0:Attribute>
            <ns0:Attribute Name="openstack_roles">
                <ns0:AttributeValue
                  xsi:type="xs:string">admin</ns0:AttributeValue>
                <ns0:AttributeValue
                  xsi:type="xs:string">member</ns0:AttributeValue>
            </ns0:Attribute>
            <ns0:Attribute Name="openstack_projects">
                <ns0:AttributeValue
                  xsi:type="xs:string">development</ns0:AttributeValue>
            </ns0:Attribute>
        </ns0:AttributeStatement>

        :return: XML <AttributeStatement> object

        """
        # One single-valued attribute for the user name.
        openstack_user = 'openstack_user'
        user_attribute = saml.Attribute()
        user_attribute.name = openstack_user
        user_value = saml.AttributeValue()
        user_value.set_text(user)
        user_attribute.attribute_value = user_value

        # One multi-valued attribute holding every role name.
        openstack_roles = 'openstack_roles'
        roles_attribute = saml.Attribute()
        roles_attribute.name = openstack_roles

        for role in roles:
            role_value = saml.AttributeValue()
            role_value.set_text(role)
            roles_attribute.attribute_value.append(role_value)

        # One single-valued attribute for the project name.
        openstack_project = 'openstack_project'
        project_attribute = saml.Attribute()
        project_attribute.name = openstack_project
        project_value = saml.AttributeValue()
        project_value.set_text(project)
        project_attribute.attribute_value = project_value

        attribute_statement = saml.AttributeStatement()
        attribute_statement.attribute.append(user_attribute)
        attribute_statement.attribute.append(roles_attribute)
        attribute_statement.attribute.append(project_attribute)
        return attribute_statement

    def _create_authn_statement(self, issuer, expiration_time):
        """Create an object that represents a SAML AuthnStatement.

        <ns0:AuthnStatement xmlns:ns0="urn:oasis:names:tc:SAML:2.0:assertion"
          AuthnInstant="2014-07-30T03:04:25Z" SessionIndex="47335964efb"
          SessionNotOnOrAfter="2014-07-30T03:04:26Z">
            <ns0:AuthnContext>
                <ns0:AuthnContextClassRef>
                  urn:oasis:names:tc:SAML:2.0:ac:classes:Password
                </ns0:AuthnContextClassRef>
                <ns0:AuthenticatingAuthority>
                  https://acme.com/FIM/sps/openstack/saml20
                </ns0:AuthenticatingAuthority>
            </ns0:AuthnContext>
        </ns0:AuthnStatement>

        :return: XML <AuthnStatement> object

        """
        authn_statement = saml.AuthnStatement()
        # NOTE(review): timeutils.isotime() is deprecated in newer oslo.utils
        # releases -- confirm before upgrading that dependency.
        authn_statement.authn_instant = timeutils.isotime()
        authn_statement.session_index = uuid.uuid4().hex
        authn_statement.session_not_on_or_after = expiration_time

        authn_context = saml.AuthnContext()
        authn_context_class = saml.AuthnContextClassRef()
        authn_context_class.set_text(saml.AUTHN_PASSWORD)

        authn_authority = saml.AuthenticatingAuthority()
        authn_authority.set_text(issuer)
        authn_context.authn_context_class_ref = authn_context_class
        authn_context.authenticating_authority = authn_authority

        authn_statement.authn_context = authn_context

        return authn_statement

    def _create_assertion(self, issuer, signature, subject, authn_statement,
                          attribute_statement):
        """Create an object that represents a SAML Assertion.

        <ns0:Assertion
          ID="35daed258ba647ba8962e9baff4d6a46"
          IssueInstant="2014-06-11T15:45:58Z"
          Version="2.0">
            <ns0:Issuer> ... </ns0:Issuer>
            <ns1:Signature> ... </ns1:Signature>
            <ns0:Subject> ... </ns0:Subject>
            <ns0:AuthnStatement> ... </ns0:AuthnStatement>
            <ns0:AttributeStatement> ... </ns0:AttributeStatement>
        </ns0:Assertion>

        :return: XML <Assertion> object

        """
        assertion = saml.Assertion()
        assertion.id = self.assertion_id
        assertion.issue_instant = timeutils.isotime()
        assertion.version = '2.0'
        assertion.issuer = issuer
        assertion.signature = signature
        assertion.subject = subject
        assertion.authn_statement = authn_statement
        assertion.attribute_statement = attribute_statement
        return assertion

    def _create_response(self, issuer, status, assertion, recipient):
        """Create an object that represents a SAML Response.

        <ns0:Response
          Destination="http://beta.com/Shibboleth.sso/SAML2/POST"
          ID="c5954543230e4e778bc5b92923a0512d"
          IssueInstant="2014-07-30T03:19:45Z"
          Version="2.0" />
            <ns0:Issuer> ... </ns0:Issuer>
            <ns0:Assertion> ... </ns0:Assertion>
            <ns0:Status> ... </ns0:Status>
        </ns0:Response>

        :return: XML <Response> object

        """
        response = samlp.Response()
        response.id = uuid.uuid4().hex
        response.destination = recipient
        response.issue_instant = timeutils.isotime()
        response.version = '2.0'
        response.issuer = issuer
        response.status = status
        response.assertion = assertion
        return response

    def _create_signature(self):
        """Create an object that represents a SAML <Signature>.

        This must be filled with algorithms that the signing binary will apply
        in order to sign the whole message.
        Currently we enforce X509 signing.
        Example of the template::

        <Signature xmlns="http://www.w3.org/2000/09/xmldsig#">
          <SignedInfo>
            <CanonicalizationMethod
              Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
            <SignatureMethod
              Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"/>
            <Reference URI="#<Assertion ID>">
              <Transforms>
                <Transform
                Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/>
                <Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
              </Transforms>
             <DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"/>
             <DigestValue />
            </Reference>
          </SignedInfo>
          <SignatureValue />
          <KeyInfo>
            <X509Data />
          </KeyInfo>
        </Signature>

        :return: XML <Signature> object

        """
        canonicalization_method = xmldsig.CanonicalizationMethod()
        canonicalization_method.algorithm = xmldsig.ALG_EXC_C14N
        signature_method = xmldsig.SignatureMethod(
            algorithm=xmldsig.SIG_RSA_SHA1)

        transforms = xmldsig.Transforms()
        envelope_transform = xmldsig.Transform(
            algorithm=xmldsig.TRANSFORM_ENVELOPED)

        c14_transform = xmldsig.Transform(algorithm=xmldsig.ALG_EXC_C14N)
        transforms.transform = [envelope_transform, c14_transform]

        digest_method = xmldsig.DigestMethod(algorithm=xmldsig.DIGEST_SHA1)
        digest_value = xmldsig.DigestValue()

        # The reference URI ties the signature to this generator's assertion.
        reference = xmldsig.Reference()
        reference.uri = '#' + self.assertion_id
        reference.digest_method = digest_method
        reference.digest_value = digest_value
        reference.transforms = transforms

        signed_info = xmldsig.SignedInfo()
        signed_info.canonicalization_method = canonicalization_method
        signed_info.signature_method = signature_method
        signed_info.reference = reference

        key_info = xmldsig.KeyInfo()
        key_info.x509_data = xmldsig.X509Data()

        # DigestValue and SignatureValue are left empty; xmlsec1 fills
        # them in during the signing step.
        signature = xmldsig.Signature()
        signature.signed_info = signed_info
        signature.signature_value = xmldsig.SignatureValue()
        signature.key_info = key_info

        return signature
+
+
def _sign_assertion(assertion):
    """Sign a SAML assertion.

    This method utilizes ``xmlsec1`` binary and signs SAML assertions in a
    separate process. ``xmlsec1`` cannot read input data from stdin so the
    prepared assertion needs to be serialized and stored in a temporary
    file. This file will be deleted immediately after ``xmlsec1`` returns.
    The signed assertion is redirected to a standard output and read using
    subprocess.PIPE redirection. A ``saml.Assertion`` class is created
    from the signed string again and returned.

    Parameters that are required in the CONF::
    * xmlsec_binary
    * private key file path
    * public key file path

    :param assertion: unsigned ``saml.Assertion`` object
    :return: XML <Assertion> object
    :raises: keystone.exception.SAMLSigningError if signing fails

    """
    xmlsec_binary = CONF.saml.xmlsec1_binary
    idp_private_key = CONF.saml.keyfile
    idp_public_key = CONF.saml.certfile

    # xmlsec1 --sign --privkey-pem privkey,cert --id-attr:ID <tag> <file>
    certificates = '%(idp_private_key)s,%(idp_public_key)s' % {
        'idp_public_key': idp_public_key,
        'idp_private_key': idp_private_key
    }

    command_list = [xmlsec_binary, '--sign', '--privkey-pem', certificates,
                    '--id-attr:ID', 'Assertion']

    # Initialize before the try block: if serializing the assertion or
    # writing the temp file fails, the finally clause below would otherwise
    # raise UnboundLocalError on 'file_path' and mask the real error.
    file_path = None
    try:
        # NOTE(gyee): need to make the namespace prefixes explicit so
        # they won't get reassigned when we wrap the assertion into
        # SAML2 response
        file_path = fileutils.write_to_tempfile(assertion.to_string(
            nspair={'saml': saml2.NAMESPACE,
                    'xmldsig': xmldsig.NAMESPACE}))
        command_list.append(file_path)
        stdout = subprocess.check_output(command_list)
    except Exception as e:
        msg = _LE('Error when signing assertion, reason: %(reason)s')
        msg = msg % {'reason': e}
        LOG.error(msg)
        raise exception.SAMLSigningError(reason=e)
    finally:
        # Only clean up the temp file if it was actually created.
        if file_path is not None:
            try:
                os.remove(file_path)
            except OSError:
                pass

    return saml2.create_class_from_xml_string(saml.Assertion, stdout)
+
+
class MetadataGenerator(object):
    """A class for generating SAML IdP Metadata."""

    def generate_metadata(self):
        """Generate Identity Provider Metadata.

        Generate and format metadata into XML that can be exposed and
        consumed by a federated Service Provider.

        :return: XML <EntityDescriptor> object.
        :raises: keystone.exception.ValidationError: Raises if the required
            config options aren't set.

        """
        self._ensure_required_values_present()
        entity_descriptor = self._create_entity_descriptor()
        entity_descriptor.idpsso_descriptor = (
            self._create_idp_sso_descriptor())
        return entity_descriptor

    def _create_entity_descriptor(self):
        """Build the root <EntityDescriptor> carrying the IdP entity id."""
        ed = md.EntityDescriptor()
        ed.entity_id = CONF.saml.idp_entity_id
        return ed

    def _create_idp_sso_descriptor(self):
        """Build the <IDPSSODescriptor> element from configuration.

        Organization and contact person sub-elements are optional and only
        included when all of their respective config options are set.
        """

        def get_cert():
            # Read the PEM signing certificate; an unreadable file is fatal.
            try:
                return sigver.read_cert_from_file(CONF.saml.certfile, 'pem')
            except (IOError, sigver.CertificateError) as e:
                msg = _('Cannot open certificate %(cert_file)s. '
                        'Reason: %(reason)s')
                msg = msg % {'cert_file': CONF.saml.certfile, 'reason': e}
                LOG.error(msg)
                raise IOError(msg)

        def key_descriptor():
            cert = get_cert()
            return md.KeyDescriptor(
                key_info=xmldsig.KeyInfo(
                    x509_data=xmldsig.X509Data(
                        x509_certificate=xmldsig.X509Certificate(text=cert)
                    )
                ), use='signing'
            )

        def single_sign_on_service():
            idp_sso_endpoint = CONF.saml.idp_sso_endpoint
            return md.SingleSignOnService(
                binding=saml2.BINDING_URI,
                location=idp_sso_endpoint)

        def organization():
            name = md.OrganizationName(lang=CONF.saml.idp_lang,
                                       text=CONF.saml.idp_organization_name)
            display_name = md.OrganizationDisplayName(
                lang=CONF.saml.idp_lang,
                text=CONF.saml.idp_organization_display_name)
            url = md.OrganizationURL(lang=CONF.saml.idp_lang,
                                     text=CONF.saml.idp_organization_url)

            return md.Organization(
                organization_display_name=display_name,
                organization_url=url, organization_name=name)

        def contact_person():
            company = md.Company(text=CONF.saml.idp_contact_company)
            given_name = md.GivenName(text=CONF.saml.idp_contact_name)
            surname = md.SurName(text=CONF.saml.idp_contact_surname)
            email = md.EmailAddress(text=CONF.saml.idp_contact_email)
            telephone = md.TelephoneNumber(
                text=CONF.saml.idp_contact_telephone)
            contact_type = CONF.saml.idp_contact_type

            return md.ContactPerson(
                company=company, given_name=given_name, sur_name=surname,
                email_address=email, telephone_number=telephone,
                contact_type=contact_type)

        def name_id_format():
            return md.NameIDFormat(text=saml.NAMEID_FORMAT_TRANSIENT)

        idpsso = md.IDPSSODescriptor()
        idpsso.protocol_support_enumeration = samlp.NAMESPACE
        idpsso.key_descriptor = key_descriptor()
        idpsso.single_sign_on_service = single_sign_on_service()
        idpsso.name_id_format = name_id_format()
        if self._check_organization_values():
            idpsso.organization = organization()
        if self._check_contact_person_values():
            idpsso.contact_person = contact_person()
        return idpsso

    def _ensure_required_values_present(self):
        """Ensure idp_sso_endpoint and idp_entity_id have values."""

        if CONF.saml.idp_entity_id is None:
            msg = _('Ensure configuration option idp_entity_id is set.')
            raise exception.ValidationError(msg)
        if CONF.saml.idp_sso_endpoint is None:
            msg = _('Ensure configuration option idp_sso_endpoint is set.')
            raise exception.ValidationError(msg)

    def _check_contact_person_values(self):
        """Determine if contact information is included in metadata."""

        # Check if we should include contact information
        params = [CONF.saml.idp_contact_company,
                  CONF.saml.idp_contact_name,
                  CONF.saml.idp_contact_surname,
                  CONF.saml.idp_contact_email,
                  CONF.saml.idp_contact_telephone]
        for value in params:
            if value is None:
                return False

        # Check if contact type is an invalid value
        valid_type_values = ['technical', 'other', 'support', 'administrative',
                             'billing']
        if CONF.saml.idp_contact_type not in valid_type_values:
            # Bug fix: the original message left the '[' bracket unbalanced.
            msg = _('idp_contact_type must be one of: [technical, other, '
                    'support, administrative or billing].')
            raise exception.ValidationError(msg)
        return True

    def _check_organization_values(self):
        """Determine if organization information is included in metadata."""

        params = [CONF.saml.idp_organization_name,
                  CONF.saml.idp_organization_display_name,
                  CONF.saml.idp_organization_url]
        for value in params:
            if value is None:
                return False
        return True
diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/federation/migrate_repo/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/migrate_repo/__init__.py
diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/federation/migrate_repo/migrate.cfg
new file mode 100644
index 00000000..464ab62b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/migrate_repo/migrate.cfg
@@ -0,0 +1,25 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=federation
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to
+# change the table name in each database too.
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the
+# commit continues, perhaps ending successfully.
+# Databases in this list MUST compile successfully during a commit, or the
+# entire commit will fail. List the databases your application will actually
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
+
+# When creating new change scripts, Migrate will stamp the new script with
+# a version number. By default this is latest_version + 1. You can set this
+# to 'true' to tell Migrate to use the UTC timestamp instead.
+use_timestamp_numbering=False
diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/001_add_identity_provider_table.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/001_add_identity_provider_table.py
new file mode 100644
index 00000000..cfb6f2c4
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/001_add_identity_provider_table.py
@@ -0,0 +1,51 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
def upgrade(migrate_engine):
    """Create the identity_provider and federation_protocol tables."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    identity_provider = sql.Table(
        'identity_provider',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('enabled', sql.Boolean, nullable=False),
        sql.Column('description', sql.Text(), nullable=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    identity_provider.create(migrate_engine, checkfirst=True)

    # Composite primary key (id, idp_id); deleting an identity provider
    # cascades to its protocol rows.
    federation_protocol = sql.Table(
        'federation_protocol',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('idp_id', sql.String(64),
                   sql.ForeignKey('identity_provider.id', ondelete='CASCADE'),
                   primary_key=True),
        sql.Column('mapping_id', sql.String(64), nullable=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    federation_protocol.create(migrate_engine, checkfirst=True)
+
+
def downgrade(migrate_engine):
    """Drop the federation tables created by this migration."""
    meta = sql.MetaData()
    meta.bind = migrate_engine
    # federation_protocol references identity_provider, so drop it first.
    for name in ('federation_protocol', 'identity_provider'):
        sql.Table(name, meta, autoload=True).drop()
diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/002_add_mapping_tables.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/002_add_mapping_tables.py
new file mode 100644
index 00000000..f827f9a9
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/002_add_mapping_tables.py
@@ -0,0 +1,37 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
def upgrade(migrate_engine):
    """Create the mapping table that stores attribute-mapping rule sets."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    mapping = sql.Table(
        'mapping',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        # Rules are stored as serialized text blobs.
        sql.Column('rules', sql.Text(), nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    mapping.create(migrate_engine, checkfirst=True)
+
+
def downgrade(migrate_engine):
    """Remove the mapping table."""
    meta = sql.MetaData()
    meta.bind = migrate_engine
    sql.Table('mapping', meta, autoload=True).drop()
diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/003_mapping_id_nullable_false.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/003_mapping_id_nullable_false.py
new file mode 100644
index 00000000..eb8b2378
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/003_mapping_id_nullable_false.py
@@ -0,0 +1,35 @@
+# Copyright 2014 Mirantis.inc
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sa
+
+
def upgrade(migrate_engine):
    """Tighten federation_protocol.mapping_id to NOT NULL."""
    meta = sa.MetaData(bind=migrate_engine)
    protocol = sa.Table('federation_protocol', meta, autoload=True)
    # NOTE(i159): The column is changed to non-nullable. To prevent
    # database errors when the column will be altered, all the existing
    # null-records should be filled with not null values.
    migrate_engine.execute(
        protocol.update()
        .where(protocol.c.mapping_id.is_(None))
        .values(mapping_id=''))
    protocol.c.mapping_id.alter(nullable=False)
+
+
def downgrade(migrate_engine):
    """Allow NULL again in federation_protocol.mapping_id."""
    meta = sa.MetaData(bind=migrate_engine)
    protocol = sa.Table('federation_protocol', meta, autoload=True)
    protocol.c.mapping_id.alter(nullable=True)
diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/004_add_remote_id_column.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/004_add_remote_id_column.py
new file mode 100644
index 00000000..dbe5d1f1
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/004_add_remote_id_column.py
@@ -0,0 +1,30 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_db.sqlalchemy import utils
+import sqlalchemy as sql
+
+
def upgrade(migrate_engine):
    """Add a nullable remote_id column to identity_provider."""
    idp_table = utils.get_table(migrate_engine, 'identity_provider')
    idp_table.create_column(
        sql.Column('remote_id', sql.String(256), nullable=True))
+
+
def downgrade(migrate_engine):
    """Remove the remote_id column from identity_provider."""
    idp_table = utils.get_table(migrate_engine, 'identity_provider')
    idp_table.drop_column('remote_id')
diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/005_add_service_provider_table.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/005_add_service_provider_table.py
new file mode 100644
index 00000000..bff6a252
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/005_add_service_provider_table.py
@@ -0,0 +1,38 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
def upgrade(migrate_engine):
    """Create the service_provider table."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    # auth_url/sp_url start out nullable; a later migration tightens them.
    service_provider = sql.Table(
        'service_provider',
        meta,
        sql.Column('auth_url', sql.String(256), nullable=True),
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('enabled', sql.Boolean, nullable=False),
        sql.Column('description', sql.Text(), nullable=True),
        sql.Column('sp_url', sql.String(256), nullable=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    service_provider.create(migrate_engine, checkfirst=True)
+
+
def downgrade(migrate_engine):
    """Drop the service_provider table."""
    meta = sql.MetaData()
    meta.bind = migrate_engine
    sql.Table('service_provider', meta, autoload=True).drop()
diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/006_fixup_service_provider_attributes.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/006_fixup_service_provider_attributes.py
new file mode 100644
index 00000000..8a42ce3a
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/006_fixup_service_provider_attributes.py
@@ -0,0 +1,48 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+_SP_TABLE_NAME = 'service_provider'
+
+
def _update_null_columns(migrate_engine, sp_table):
    """Rewrite NULL auth_url/sp_url values as empty strings."""
    for column, values in ((sp_table.c.auth_url, {'auth_url': ''}),
                           (sp_table.c.sp_url, {'sp_url': ''})):
        migrate_engine.execute(
            sp_table.update().where(column.is_(None)).values(**values))
+
+
def upgrade(migrate_engine):
    """Make service_provider.auth_url and sp_url NOT NULL."""
    meta = sql.MetaData()
    meta.bind = migrate_engine
    sp_table = sql.Table(_SP_TABLE_NAME, meta, autoload=True)
    # Backfill NULLs first so altering the columns cannot fail on
    # pre-existing rows.
    _update_null_columns(migrate_engine, sp_table)

    sp_table.c.auth_url.alter(nullable=False)
    sp_table.c.sp_url.alter(nullable=False)
+
+
def downgrade(migrate_engine):
    """Relax auth_url and sp_url back to nullable."""
    meta = sql.MetaData()
    meta.bind = migrate_engine
    sp_table = sql.Table(_SP_TABLE_NAME, meta, autoload=True)
    sp_table.c.auth_url.alter(nullable=True)
    sp_table.c.sp_url.alter(nullable=True)
diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/__init__.py
diff --git a/keystone-moon/keystone/contrib/federation/routers.py b/keystone-moon/keystone/contrib/federation/routers.py
new file mode 100644
index 00000000..9a6224b7
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/routers.py
@@ -0,0 +1,226 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+
+from keystone.common import json_home
+from keystone.common import wsgi
+from keystone.contrib.federation import controllers
+
+
# JSON-home relation builders pre-bound to this extension's name/version.
build_resource_relation = functools.partial(
    json_home.build_v3_extension_resource_relation,
    extension_name='OS-FEDERATION', extension_version='1.0')

build_parameter_relation = functools.partial(
    json_home.build_v3_extension_parameter_relation,
    extension_name='OS-FEDERATION', extension_version='1.0')

# Pre-built parameter relations reused by several routes below.
IDP_ID_PARAMETER_RELATION = build_parameter_relation(parameter_name='idp_id')
PROTOCOL_ID_PARAMETER_RELATION = build_parameter_relation(
    parameter_name='protocol_id')
SP_ID_PARAMETER_RELATION = build_parameter_relation(parameter_name='sp_id')
+
+
class FederationExtension(wsgi.V3ExtensionRouter):
    """API Endpoints for the Federation extension.

    The API looks like::

        PUT /OS-FEDERATION/identity_providers/$identity_provider
        GET /OS-FEDERATION/identity_providers
        GET /OS-FEDERATION/identity_providers/$identity_provider
        DELETE /OS-FEDERATION/identity_providers/$identity_provider
        PATCH /OS-FEDERATION/identity_providers/$identity_provider

        PUT /OS-FEDERATION/identity_providers/
            $identity_provider/protocols/$protocol
        GET /OS-FEDERATION/identity_providers/
            $identity_provider/protocols
        GET /OS-FEDERATION/identity_providers/
            $identity_provider/protocols/$protocol
        PATCH /OS-FEDERATION/identity_providers/
            $identity_provider/protocols/$protocol
        DELETE /OS-FEDERATION/identity_providers/
            $identity_provider/protocols/$protocol

        PUT /OS-FEDERATION/mappings
        GET /OS-FEDERATION/mappings
        PATCH /OS-FEDERATION/mappings/$mapping_id
        GET /OS-FEDERATION/mappings/$mapping_id
        DELETE /OS-FEDERATION/mappings/$mapping_id

        GET /OS-FEDERATION/projects
        GET /OS-FEDERATION/domains

        PUT /OS-FEDERATION/service_providers/$service_provider
        GET /OS-FEDERATION/service_providers
        GET /OS-FEDERATION/service_providers/$service_provider
        DELETE /OS-FEDERATION/service_providers/$service_provider
        PATCH /OS-FEDERATION/service_providers/$service_provider

        GET /OS-FEDERATION/identity_providers/$identity_provider/
            protocols/$protocol/auth
        POST /OS-FEDERATION/identity_providers/$identity_provider/
            protocols/$protocol/auth

        POST /auth/OS-FEDERATION/saml2
        GET /OS-FEDERATION/saml2/metadata

        GET /auth/OS-FEDERATION/websso/{protocol_id}
            ?origin=https%3A//horizon.example.com

        POST /auth/OS-FEDERATION/websso/{protocol_id}
            ?origin=https%3A//horizon.example.com

    """
    def _construct_url(self, suffix):
        # Every resource of this extension lives under the OS-FEDERATION
        # prefix.
        return "/OS-FEDERATION/%s" % suffix

    def add_routes(self, mapper):
        # Controllers backing each resource family registered below.
        auth_controller = controllers.Auth()
        idp_controller = controllers.IdentityProvider()
        protocol_controller = controllers.FederationProtocol()
        mapping_controller = controllers.MappingController()
        project_controller = controllers.ProjectAssignmentV3()
        domain_controller = controllers.DomainV3()
        saml_metadata_controller = controllers.SAMLMetadataV3()
        sp_controller = controllers.ServiceProvider()

        # Identity Provider CRUD operations

        self._add_resource(
            mapper, idp_controller,
            path=self._construct_url('identity_providers/{idp_id}'),
            get_action='get_identity_provider',
            put_action='create_identity_provider',
            patch_action='update_identity_provider',
            delete_action='delete_identity_provider',
            rel=build_resource_relation(resource_name='identity_provider'),
            path_vars={
                'idp_id': IDP_ID_PARAMETER_RELATION,
            })
        self._add_resource(
            mapper, idp_controller,
            path=self._construct_url('identity_providers'),
            get_action='list_identity_providers',
            rel=build_resource_relation(resource_name='identity_providers'))

        # Protocol CRUD operations

        self._add_resource(
            mapper, protocol_controller,
            path=self._construct_url('identity_providers/{idp_id}/protocols/'
                                     '{protocol_id}'),
            get_action='get_protocol',
            put_action='create_protocol',
            patch_action='update_protocol',
            delete_action='delete_protocol',
            rel=build_resource_relation(
                resource_name='identity_provider_protocol'),
            path_vars={
                'idp_id': IDP_ID_PARAMETER_RELATION,
                'protocol_id': PROTOCOL_ID_PARAMETER_RELATION,
            })
        self._add_resource(
            mapper, protocol_controller,
            path=self._construct_url('identity_providers/{idp_id}/protocols'),
            get_action='list_protocols',
            rel=build_resource_relation(
                resource_name='identity_provider_protocols'),
            path_vars={
                'idp_id': IDP_ID_PARAMETER_RELATION,
            })

        # Mapping CRUD operations

        self._add_resource(
            mapper, mapping_controller,
            path=self._construct_url('mappings/{mapping_id}'),
            get_action='get_mapping',
            put_action='create_mapping',
            patch_action='update_mapping',
            delete_action='delete_mapping',
            rel=build_resource_relation(resource_name='mapping'),
            path_vars={
                'mapping_id': build_parameter_relation(
                    parameter_name='mapping_id'),
            })
        self._add_resource(
            mapper, mapping_controller,
            path=self._construct_url('mappings'),
            get_action='list_mappings',
            rel=build_resource_relation(resource_name='mappings'))

        # Service Providers CRUD operations

        self._add_resource(
            mapper, sp_controller,
            path=self._construct_url('service_providers/{sp_id}'),
            get_action='get_service_provider',
            put_action='create_service_provider',
            patch_action='update_service_provider',
            delete_action='delete_service_provider',
            rel=build_resource_relation(resource_name='service_provider'),
            path_vars={
                'sp_id': SP_ID_PARAMETER_RELATION,
            })

        self._add_resource(
            mapper, sp_controller,
            path=self._construct_url('service_providers'),
            get_action='list_service_providers',
            rel=build_resource_relation(resource_name='service_providers'))

        # Listings of domains/projects reachable via federated groups.
        self._add_resource(
            mapper, domain_controller,
            path=self._construct_url('domains'),
            get_action='list_domains_for_groups',
            rel=build_resource_relation(resource_name='domains'))
        self._add_resource(
            mapper, project_controller,
            path=self._construct_url('projects'),
            get_action='list_projects_for_groups',
            rel=build_resource_relation(resource_name='projects'))
        self._add_resource(
            mapper, auth_controller,
            path=self._construct_url('identity_providers/{identity_provider}/'
                                     'protocols/{protocol}/auth'),
            get_post_action='federated_authentication',
            rel=build_resource_relation(
                resource_name='identity_provider_protocol_auth'),
            path_vars={
                'identity_provider': IDP_ID_PARAMETER_RELATION,
                'protocol': PROTOCOL_ID_PARAMETER_RELATION,
            })

        # Auth operations
        self._add_resource(
            mapper, auth_controller,
            path='/auth' + self._construct_url('saml2'),
            post_action='create_saml_assertion',
            rel=build_resource_relation(resource_name='saml2'))
        self._add_resource(
            mapper, auth_controller,
            path='/auth' + self._construct_url('websso/{protocol_id}'),
            get_post_action='federated_sso_auth',
            rel=build_resource_relation(resource_name='websso'),
            path_vars={
                'protocol_id': PROTOCOL_ID_PARAMETER_RELATION,
            })

        # Keystone-Identity-Provider metadata endpoint
        self._add_resource(
            mapper, saml_metadata_controller,
            path=self._construct_url('saml2/metadata'),
            get_action='get_metadata',
            rel=build_resource_relation(resource_name='metadata'))
diff --git a/keystone-moon/keystone/contrib/federation/schema.py b/keystone-moon/keystone/contrib/federation/schema.py
new file mode 100644
index 00000000..645e1129
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/schema.py
@@ -0,0 +1,78 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import validation
+from keystone.common.validation import parameter_types
+
+
# Minimal JSON schema for an object carrying only an 'id' string.
basic_property_id = {
    'type': 'object',
    'properties': {
        'id': {
            'type': 'string'
        }
    },
    'required': ['id'],
    'additionalProperties': False
}

# Request schema for generating a SAML assertion: a token-based identity
# plus a scope naming the target service provider.
saml_create = {
    'type': 'object',
    'properties': {
        'identity': {
            'type': 'object',
            'properties': {
                'token': basic_property_id,
                'methods': {
                    'type': 'array'
                }
            },
            'required': ['token'],
            'additionalProperties': False
        },
        'scope': {
            'type': 'object',
            'properties': {
                'service_provider': basic_property_id
            },
            'required': ['service_provider'],
            'additionalProperties': False
        },
    },
    'required': ['identity', 'scope'],
    'additionalProperties': False
}

# Property definitions shared by the create and update schemas below.
_service_provider_properties = {
    # NOTE(rodrigods): The database accepts URLs with 256 as max length,
    # but parameter_types.url uses 225 as max length.
    'auth_url': parameter_types.url,
    'sp_url': parameter_types.url,
    'description': validation.nullable(parameter_types.description),
    'enabled': parameter_types.boolean
}

service_provider_create = {
    'type': 'object',
    'properties': _service_provider_properties,
    # NOTE(rodrigods): 'id' is not required since it is passed in the URL
    'required': ['auth_url', 'sp_url'],
    'additionalProperties': False
}

# Updates may touch any subset of the properties but must not be empty.
service_provider_update = {
    'type': 'object',
    'properties': _service_provider_properties,
    # Make sure at least one property is being updated
    'minProperties': 1,
    'additionalProperties': False
}
diff --git a/keystone-moon/keystone/contrib/federation/utils.py b/keystone-moon/keystone/contrib/federation/utils.py
new file mode 100644
index 00000000..939fe9a0
--- /dev/null
+++ b/keystone-moon/keystone/contrib/federation/utils.py
@@ -0,0 +1,763 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Utilities for Federation Extension."""
+
+import ast
+import re
+
+import jsonschema
+from oslo_config import cfg
+from oslo_log import log
+from oslo_utils import timeutils
+import six
+
+from keystone.contrib import federation
+from keystone import exception
+from keystone.i18n import _, _LW
+
+
# Module-level configuration and logger handles.
CONF = cfg.CONF
LOG = log.getLogger(__name__)
+
+
# JSON schema used by validate_mapping_structure() to validate mapping
# rules: each rule pairs 'local' attribute templates with 'remote' match
# specifications, and every remote entry must take exactly one of the
# shapes defined under "definitions".
MAPPING_SCHEMA = {
    "type": "object",
    "required": ['rules'],
    "properties": {
        "rules": {
            "minItems": 1,
            "type": "array",
            "items": {
                "type": "object",
                "required": ['local', 'remote'],
                "additionalProperties": False,
                "properties": {
                    "local": {
                        "type": "array"
                    },
                    "remote": {
                        "minItems": 1,
                        "type": "array",
                        "items": {
                            "type": "object",
                            "oneOf": [
                                {"$ref": "#/definitions/empty"},
                                {"$ref": "#/definitions/any_one_of"},
                                {"$ref": "#/definitions/not_any_of"},
                                {"$ref": "#/definitions/blacklist"},
                                {"$ref": "#/definitions/whitelist"}
                            ],
                        }
                    }
                }
            }
        }
    },
    "definitions": {
        "empty": {
            "type": "object",
            "required": ['type'],
            "properties": {
                "type": {
                    "type": "string"
                },
            },
            "additionalProperties": False,
        },
        "any_one_of": {
            "type": "object",
            "additionalProperties": False,
            "required": ['type', 'any_one_of'],
            "properties": {
                "type": {
                    "type": "string"
                },
                "any_one_of": {
                    "type": "array"
                },
                "regex": {
                    "type": "boolean"
                }
            }
        },
        "not_any_of": {
            "type": "object",
            "additionalProperties": False,
            "required": ['type', 'not_any_of'],
            "properties": {
                "type": {
                    "type": "string"
                },
                "not_any_of": {
                    "type": "array"
                },
                "regex": {
                    "type": "boolean"
                }
            }
        },
        "blacklist": {
            "type": "object",
            "additionalProperties": False,
            "required": ['type', 'blacklist'],
            "properties": {
                "type": {
                    "type": "string"
                },
                "blacklist": {
                    "type": "array"
                }
            }
        },
        "whitelist": {
            "type": "object",
            "additionalProperties": False,
            "required": ['type', 'whitelist'],
            "properties": {
                "type": {
                    "type": "string"
                },
                "whitelist": {
                    "type": "array"
                }
            }
        }
    }
}
+
+
class DirectMaps(object):
    """Stores the remote matches captured while evaluating a rule.

    Matches are stored as lists; one-element lists are unwrapped on
    access so ``'{0}'.format(*maps)`` substitutes the bare value.
    """

    def __init__(self):
        self._matches = []

    def add(self, values):
        """Record a matched value (a list) for later substitution.

        :param list values: the match to save

        """
        self._matches.append(values)

    def __getitem__(self, idx):
        """Return match *idx*, unwrapping single-element lists."""
        match = self._matches[idx]
        if isinstance(match, list) and len(match) == 1:
            return match[0]
        return match
+
+
def validate_mapping_structure(ref):
    """Validate ``ref`` against MAPPING_SCHEMA, raising on any error."""
    validator = jsonschema.Draft4Validator(MAPPING_SCHEMA)

    # Fold every validation error (sorted for deterministic output) into
    # one newline-terminated message string.
    messages = ''.join(error.message + '\n'
                       for error in sorted(validator.iter_errors(ref),
                                           key=str))
    if messages:
        raise exception.ValidationError(messages)
+
+
def validate_expiration(token_ref):
    """Raise Unauthorized when the federation token has expired."""
    expired = timeutils.utcnow() > token_ref.expires
    if expired:
        raise exception.Unauthorized(_('Federation token is expired'))
+
+
def validate_groups_cardinality(group_ids, mapping_id):
    """Check that the list of group ids is non-empty.

    :param group_ids: list of group ids
    :type group_ids: list of str
    :param mapping_id: id of the mapping used for this operation
    :type mapping_id: str

    :raises exception.MissingGroups: if ``group_ids`` cardinality is 0

    """
    if group_ids:
        return
    raise exception.MissingGroups(mapping_id=mapping_id)
+
+
def validate_idp(idp, assertion):
    """Check that the assertion comes from the IdP registered for the
    mapping.
    """
    remote_id_parameter = CONF.federation.remote_id_attribute
    if not remote_id_parameter or not idp['remote_id']:
        LOG.warning(_LW('Impossible to identify the IdP %s '),
                    idp['id'])
        # If nothing is defined, the administrator may want to
        # allow the mapping of every IdP
        return
    if remote_id_parameter not in assertion:
        msg = _('Could not find Identity Provider identifier in '
                'environment, check [federation] remote_id_attribute '
                'for details.')
        raise exception.ValidationError(msg)
    if assertion[remote_id_parameter] != idp['remote_id']:
        msg = _('Incoming identity provider identifier not included '
                'among the accepted identifiers.')
        raise exception.Forbidden(msg)
+
+
def validate_groups_in_backend(group_ids, mapping_id, identity_api):
    """Verify that every group id exists in the identity backend.

    This call is not transactional.

    :param group_ids: IDs of the groups to be checked
    :type group_ids: list of str
    :param mapping_id: id of the mapping used for this operation
    :type mapping_id: str
    :param identity_api: Identity Manager object used for communication
                         with backend
    :type identity_api: identity.Manager

    :raises: exception.MappedGroupNotFound

    """
    for gid in group_ids:
        try:
            identity_api.get_group(gid)
        except exception.GroupNotFound:
            raise exception.MappedGroupNotFound(group_id=gid,
                                                mapping_id=mapping_id)
+
+
def validate_groups(group_ids, mapping_id, identity_api):
    """Check group cardinality, then group existence in the backend.

    This call is not transactional.

    :param group_ids: IDs of the groups to be checked
    :type group_ids: list of str
    :param mapping_id: id of the mapping used for this operation
    :type mapping_id: str
    :param identity_api: Identity Manager object used for communication
                         with backend
    :type identity_api: identity.Manager

    :raises: exception.MappedGroupNotFound
    :raises: exception.MissingGroups

    """
    validate_groups_cardinality(group_ids, mapping_id)
    validate_groups_in_backend(group_ids, mapping_id, identity_api)
+
+
+# TODO(marek-denis): Optimize this function, so the number of calls to the
+# backend are minimized.
def transform_to_group_ids(group_names, mapping_id,
                           identity_api, assignment_api):
    """Resolve groups given by name and domain into their backend ids.

    Example of group_names parameter::

        [
            {
                "name": "group_name",
                "domain": {
                    "id": "domain_id"
                },
            },
            {
                "name": "group_name_2",
                "domain": {
                    "name": "domain_name"
                }
            }
        ]

    :param group_names: list of group identified by name and its domain.
    :type group_names: list

    :param mapping_id: id of the mapping used for mapping assertion into
        local credentials
    :type mapping_id: str

    :param identity_api: identity_api object
    :param assignment_api: assignment_api object

    :returns: generator object with group ids

    :raises: excepton.MappedGroupNotFound: in case asked group doesn't
        exist in the backend.

    """

    def _domain_id(domain):
        # The domain dict carries either an explicit 'id' or a 'name'
        # that must be resolved through the assignment backend.
        if domain.get('id'):
            return domain['id']
        return assignment_api.get_domain_by_name(
            domain.get('name')).get('id')

    for group in group_names:
        try:
            group_ref = identity_api.get_group_by_name(
                group['name'], _domain_id(group['domain']))
        except exception.GroupNotFound:
            LOG.debug('Skip mapping group %s; has no entry in the backend',
                      group['name'])
        else:
            yield group_ref['id']
+
+
def get_assertion_params_from_env(context):
    """Yield (name, value) environment pairs whose keys start with the
    configured assertion prefix.
    """
    environment = context['environment']
    LOG.debug('Environment variables: %s', environment)
    prefix = CONF.federation.assertion_prefix
    for name, value in environment.items():
        if name.startswith(prefix):
            yield (name, value)
+
+
class UserType(object):
    """Allowed values for a mapped user's type."""

    EPHEMERAL = 'ephemeral'
    LOCAL = 'local'
+
+
class RuleProcessor(object):
    """A class to process assertions and mapping rules."""

    class _EvalType(object):
        """Mapping rule evaluation types."""
        ANY_ONE_OF = 'any_one_of'
        NOT_ANY_OF = 'not_any_of'
        BLACKLIST = 'blacklist'
        WHITELIST = 'whitelist'

    def __init__(self, rules):
        """Initialize RuleProcessor.

        Example rules can be found at:
        :class:`keystone.tests.mapping_fixtures`

        :param rules: rules from a mapping
        :type rules: dict

        """

        self.rules = rules

    def process(self, assertion_data):
        """Transform assertion to a dictionary of user name and group ids
        based on mapping rules.

        This function will iterate through the mapping rules to find
        assertions that are valid.

        :param assertion_data: an assertion containing values from an IdP
        :type assertion_data: dict

        Example assertion_data::

            {
                'Email': 'testacct@example.com',
                'UserName': 'testacct',
                'FirstName': 'Test',
                'LastName': 'Account',
                'orgPersonType': 'Tester'
            }

        :returns: dictionary with user and group_ids

        The expected return structure is::

            {
                'name': 'foobar',
                'group_ids': ['abc123', 'def456'],
                'group_names': [
                    {
                        'name': 'group_name_1',
                        'domain': {
                            'name': 'domain1'
                        }
                    },
                    {
                        'name': 'group_name_1_1',
                        'domain': {
                            'name': 'domain1'
                        }
                    },
                    {
                        'name': 'group_name_2',
                        'domain': {
                            'id': 'xyz132'
                        }
                    }
                ]
            }

        """

        # Assertions will come in as string key-value pairs, and will use a
        # semi-colon to indicate multiple values, i.e. groups.
        # This will create a new dictionary where the values are arrays, and
        # any multiple values are stored in the arrays.
        LOG.debug('assertion data: %s', assertion_data)
        assertion = {n: v.split(';') for n, v in assertion_data.items()
                     if isinstance(v, six.string_types)}
        LOG.debug('assertion: %s', assertion)
        identity_values = []

        LOG.debug('rules: %s', self.rules)
        for rule in self.rules:
            direct_maps = self._verify_all_requirements(rule['remote'],
                                                        assertion)

            # If the compare comes back as None, then the rule did not apply
            # to the assertion data, go on to the next rule
            if direct_maps is None:
                continue

            # If there are no direct mappings, then add the local mapping
            # directly to the array of saved values. However, if there is
            # a direct mapping, then perform variable replacement.
            if not direct_maps:
                identity_values += rule['local']
            else:
                for local in rule['local']:
                    new_local = self._update_local_mapping(local, direct_maps)
                    identity_values.append(new_local)

        LOG.debug('identity_values: %s', identity_values)
        mapped_properties = self._transform(identity_values)
        LOG.debug('mapped_properties: %s', mapped_properties)
        return mapped_properties

    def _transform(self, identity_values):
        """Transform local mappings, to an easier to understand format.

        Transform the incoming array to generate the return value for
        the process function. Generating content for Keystone tokens will
        be easier if some pre-processing is done at this level.

        :param identity_values: local mapping from valid evaluations
        :type identity_values: array of dict

        Example identity_values::

            [
                {
                    'group': {'id': '0cd5e9'},
                    'user': {
                        'email': 'bob@example.com'
                    },
                },
                {
                    'groups': ['member', 'admin', 'tester'],
                    'domain': {
                        'name': 'default_domain'
                    }
                }
            ]

        :returns: dictionary with user name, group_ids and group_names.
        :rtype: dict

        """

        def extract_groups(groups_by_domain):
            # Deduplicate groups by name within each domain bucket; the last
            # occurrence of a given name wins.
            for groups in groups_by_domain.values():
                for group in {g['name']: g for g in groups}.values():
                    yield group

        def normalize_user(user):
            """Parse and validate user mapping."""

            user_type = user.get('type')

            if user_type and user_type not in (UserType.EPHEMERAL,
                                               UserType.LOCAL):
                msg = _("User type %s not supported") % user_type
                raise exception.ValidationError(msg)

            # Ephemeral is the default when the mapping does not say.
            if user_type is None:
                user_type = user['type'] = UserType.EPHEMERAL

            # Ephemeral users are forced into the dedicated federated domain.
            if user_type == UserType.EPHEMERAL:
                user['domain'] = {
                    'id': (CONF.federation.federated_domain_name or
                           federation.FEDERATED_DOMAIN_KEYWORD)
                }

        # initialize the group_ids as a set to eliminate duplicates
        user = {}
        group_ids = set()
        group_names = list()
        groups_by_domain = dict()

        for identity_value in identity_values:
            if 'user' in identity_value:
                # if a mapping outputs more than one user name, log it
                if user:
                    LOG.warning(_LW('Ignoring user name'))
                else:
                    user = identity_value.get('user')
            if 'group' in identity_value:
                group = identity_value['group']
                if 'id' in group:
                    group_ids.add(group['id'])
                elif 'name' in group:
                    # Groups are bucketed by domain name (preferred) or id.
                    domain = (group['domain'].get('name') or
                              group['domain'].get('id'))
                    groups_by_domain.setdefault(domain, list()).append(group)
                # NOTE(review): extending inside the loop re-appends groups
                # accumulated from earlier identity values, which can leave
                # duplicate entries in group_names — confirm intended.
                group_names.extend(extract_groups(groups_by_domain))
            if 'groups' in identity_value:
                if 'domain' not in identity_value:
                    msg = _("Invalid rule: %(identity_value)s. Both 'groups' "
                            "and 'domain' keywords must be specified.")
                    msg = msg % {'identity_value': identity_value}
                    raise exception.ValidationError(msg)
                # In this case, identity_value['groups'] is a string
                # representation of a list, and we want a real list. This is
                # due to the way we do direct mapping substitutions today (see
                # function _update_local_mapping() )
                try:
                    group_names_list = ast.literal_eval(
                        identity_value['groups'])
                except ValueError:
                    group_names_list = [identity_value['groups']]
                domain = identity_value['domain']
                group_dicts = [{'name': name, 'domain': domain} for name in
                               group_names_list]

                group_names.extend(group_dicts)

        normalize_user(user)

        return {'user': user,
                'group_ids': list(group_ids),
                'group_names': group_names}

    def _update_local_mapping(self, local, direct_maps):
        """Replace any {0}, {1} ... values with data from the assertion.

        :param local: local mapping reference that needs to be updated
        :type local: dict
        :param direct_maps: identity values used to update local
        :type direct_maps: keystone.contrib.federation.utils.DirectMaps

        Example local::

            {'user': {'name': '{0} {1}', 'email': '{2}'}}

        Example direct_maps::

            ['Bob', 'Thompson', 'bob@example.com']

        :returns: new local mapping reference with replaced values.

        The expected return structure is::

            {'user': {'name': 'Bob Thompson', 'email': 'bob@example.com'}}

        """

        LOG.debug('direct_maps: %s', direct_maps)
        LOG.debug('local: %s', local)
        new = {}
        for k, v in six.iteritems(local):
            if isinstance(v, dict):
                # Nested dicts are rewritten recursively.
                new_value = self._update_local_mapping(v, direct_maps)
            else:
                new_value = v.format(*direct_maps)
            new[k] = new_value
        return new

    def _verify_all_requirements(self, requirements, assertion):
        """Go through the remote requirements of a rule, and compare against
        the assertion.

        If a value of ``None`` is returned, the rule with this assertion
        doesn't apply.
        If an array of zero length is returned, then there are no direct
        mappings to be performed, but the rule is valid.
        Otherwise, then it will first attempt to filter the values according
        to blacklist or whitelist rules and finally return the values in
        order, to be directly mapped.

        :param requirements: list of remote requirements from rules
        :type requirements: list

        Example requirements::

            [
                {
                    "type": "UserName"
                },
                {
                    "type": "orgPersonType",
                    "any_one_of": [
                        "Customer"
                    ]
                },
                {
                    "type": "ADFS_GROUPS",
                    "whitelist": [
                        "g1", "g2", "g3", "g4"
                    ]
                }
            ]

        :param assertion: dict of attributes from an IdP
        :type assertion: dict

        Example assertion::

            {
                'UserName': ['testacct'],
                'LastName': ['Account'],
                'orgPersonType': ['Tester'],
                'Email': ['testacct@example.com'],
                'FirstName': ['Test'],
                'ADFS_GROUPS': ['g1', 'g2']
            }

        :returns: identity values used to update local
        :rtype: keystone.contrib.federation.utils.DirectMaps

        """

        direct_maps = DirectMaps()

        for requirement in requirements:
            requirement_type = requirement['type']
            regex = requirement.get('regex', False)

            any_one_values = requirement.get(self._EvalType.ANY_ONE_OF)
            if any_one_values is not None:
                if self._evaluate_requirement(any_one_values,
                                              requirement_type,
                                              self._EvalType.ANY_ONE_OF,
                                              regex,
                                              assertion):
                    continue
                else:
                    return None

            not_any_values = requirement.get(self._EvalType.NOT_ANY_OF)
            if not_any_values is not None:
                if self._evaluate_requirement(not_any_values,
                                              requirement_type,
                                              self._EvalType.NOT_ANY_OF,
                                              regex,
                                              assertion):
                    continue
                else:
                    return None

            # If 'any_one_of' or 'not_any_of' are not found, then values are
            # within 'type'. Attempt to find that 'type' within the assertion,
            # and filter these values if 'whitelist' or 'blacklist' is set.
            direct_map_values = assertion.get(requirement_type)
            if direct_map_values:
                blacklisted_values = requirement.get(self._EvalType.BLACKLIST)
                whitelisted_values = requirement.get(self._EvalType.WHITELIST)

                # If a blacklist or whitelist is used, we want to map to the
                # whole list instead of just its values separately.
                if blacklisted_values:
                    direct_map_values = [v for v in direct_map_values
                                         if v not in blacklisted_values]
                elif whitelisted_values:
                    direct_map_values = [v for v in direct_map_values
                                         if v in whitelisted_values]

                direct_maps.add(direct_map_values)

                LOG.debug('updating a direct mapping: %s', direct_map_values)

        return direct_maps

    def _evaluate_values_by_regex(self, values, assertion_values):
        # True as soon as any pattern in ``values`` matches any assertion
        # value; patterns are applied with re.search (substring semantics).
        for value in values:
            for assertion_value in assertion_values:
                if re.search(value, assertion_value):
                    return True
        return False

    def _evaluate_requirement(self, values, requirement_type,
                              eval_type, regex, assertion):
        """Evaluate the incoming requirement and assertion.

        If the requirement type does not exist in the assertion data, then
        return False. If regex is specified, then compare the values and
        assertion values. Otherwise, grab the intersection of the values
        and use that to compare against the evaluation type.

        :param values: list of allowed values, defined in the requirement
        :type values: list
        :param requirement_type: key to look for in the assertion
        :type requirement_type: string
        :param eval_type: determine how to evaluate requirements
        :type eval_type: string
        :param regex: perform evaluation with regex
        :type regex: boolean
        :param assertion: dict of attributes from the IdP
        :type assertion: dict

        :returns: boolean, whether requirement is valid or not.

        """

        assertion_values = assertion.get(requirement_type)
        if not assertion_values:
            return False

        if regex:
            any_match = self._evaluate_values_by_regex(values,
                                                       assertion_values)
        else:
            any_match = bool(set(values).intersection(set(assertion_values)))
        if any_match and eval_type == self._EvalType.ANY_ONE_OF:
            return True
        if not any_match and eval_type == self._EvalType.NOT_ANY_OF:
            return True

        return False
+
+
def assert_enabled_identity_provider(federation_api, idp_id):
    """Raise ``Forbidden`` unless the identity provider is enabled."""
    idp_ref = federation_api.get_idp(idp_id)
    if idp_ref.get('enabled') is True:
        return
    msg = _('Identity Provider %(idp)s is disabled') % {'idp': idp_id}
    LOG.debug(msg)
    raise exception.Forbidden(msg)
+
+
def assert_enabled_service_provider_object(service_provider):
    """Raise ``Forbidden`` unless the service provider is enabled."""
    if service_provider.get('enabled') is True:
        return
    sp_id = service_provider['id']
    msg = _('Service Provider %(sp)s is disabled') % {'sp': sp_id}
    LOG.debug(msg)
    raise exception.Forbidden(msg)
diff --git a/keystone-moon/keystone/contrib/moon/__init__.py b/keystone-moon/keystone/contrib/moon/__init__.py
new file mode 100644
index 00000000..6a96782e
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/__init__.py
@@ -0,0 +1,8 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+from keystone.contrib.moon.core import * # noqa
+from keystone.contrib.moon import controllers # noqa
+from keystone.contrib.moon import routers # noqa \ No newline at end of file
diff --git a/keystone-moon/keystone/contrib/moon/backends/__init__.py b/keystone-moon/keystone/contrib/moon/backends/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/backends/__init__.py
diff --git a/keystone-moon/keystone/contrib/moon/backends/flat.py b/keystone-moon/keystone/contrib/moon/backends/flat.py
new file mode 100644
index 00000000..6d18d3ea
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/backends/flat.py
@@ -0,0 +1,123 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+from uuid import uuid4
+import os
+import logging
+import re
+import time
+from keystone import config
+from oslo_log import log
+# from keystone.contrib.moon.core import SuperExtensionDriver
+from keystone.contrib.moon.core import LogDriver
+
+
+CONF = config.CONF
+
+
class LogConnector(LogDriver):
    """Log driver writing authorization decisions to a dedicated flat file.

    System messages go through the regular Keystone logger; authz decisions
    are duplicated to ``AUTHZ_FILE`` with a parseable timestamp prefix.
    """

    AUTHZ_FILE = '/var/log/moon/authz.log'
    # Format used both to stamp authz records and to parse from=/to= bounds.
    TIME_FORMAT = '%Y-%m-%d-%H:%M:%S'

    def __init__(self):
        # Fixme (dthom): when logging from an other class, the %appname% in the event
        # is always keystone.contrib.moon.backends.flat
        super(LogConnector, self).__init__()
        # Configure Log to add new files in /var/log/moon/authz.log and /var/log/moon/system.log
        self.LOG = log.getLogger(__name__)
        self.AUTHZ_LOG = logging.getLogger("authz")
        self.AUTHZ_LOG.setLevel(logging.WARNING)
        fh = logging.FileHandler(self.AUTHZ_FILE)
        fh.setLevel(logging.WARNING)
        formatter = logging.Formatter('%(asctime)s ------ %(message)s', self.TIME_FORMAT)
        fh.setFormatter(formatter)
        self.AUTHZ_LOG.addHandler(fh)

    def authz(self, message):
        # Logger.warn is a deprecated alias for Logger.warning.
        self.AUTHZ_LOG.warning(message)

    def debug(self, message):
        self.LOG.debug(message)

    def info(self, message):
        self.LOG.info(message)

    def warning(self, message):
        self.LOG.warning(message)

    def error(self, message):
        self.LOG.error(message)

    def critical(self, message):
        self.LOG.critical(message)

    def _parse_options(self, options):
        """Parse the comma separated option string of get_logs().

        :returns: tuple ``(event_number, time_from, time_to, filter_str)``
            where each element is None when absent or unparseable.
        """
        event_number = None
        time_from = None
        time_to = None
        filter_str = None
        for opt in options:
            if "event_number" in opt:
                event_number = "".join(re.findall(r"\d*", opt.split("=")[-1]))
                try:
                    event_number = int(event_number)
                except ValueError:
                    event_number = None
            elif "from" in opt:
                time_from = "".join(re.findall(r"[\w\-:]*", opt.split("=")[-1]))
                try:
                    time_from = time.strptime(time_from, self.TIME_FORMAT)
                except ValueError:
                    time_from = None
            elif "to" in opt:
                # The original pattern here ("[\w\-:] *") carried a stray
                # space that let arbitrary characters through; it is now
                # identical to the "from" branch.
                time_to = "".join(re.findall(r"[\w\-:]*", opt.split("=")[-1]))
                try:
                    time_to = time.strptime(time_to, self.TIME_FORMAT)
                except ValueError:
                    time_to = None
            elif "filter" in opt:
                filter_str = "".join(re.findall(r"\w*", opt.split("=")[-1]))
        return event_number, time_from, time_to, filter_str

    def _line_time(self, line):
        """Return the timestamp of one authz log line.

        The first space-separated token is the asctime written by authz().
        :raises ValueError: if the token does not match TIME_FORMAT.
        """
        return time.strptime(line.split(" ")[0], self.TIME_FORMAT)

    def get_logs(self, options):
        """Return authz log lines, optionally filtered.

        :param options: comma separated directives among
            ``event_number=N`` (keep the N most recent lines),
            ``from=<time>``/``to=<time>`` (inclusive bounds, TIME_FORMAT)
            and ``filter=<word>`` (substring match).
        :returns: list of matching log lines
        """
        options = options.split(",")
        self.info("Options of logs check : {}".format(options))
        event_number, time_from, time_to, filter_str = \
            self._parse_options(options)
        # Read via a context manager so the file handle is not leaked.
        with open(self.AUTHZ_FILE) as authz_file:
            _logs = authz_file.readlines()
        if filter_str:
            _logs = [line for line in _logs if filter_str in line]
        self.info("Options of logs check : {} {} {} {}".format(
            event_number, time_from, time_to, filter_str))
        # The original looped over the lines while re-running the same
        # loop-invariant filter() each iteration (and shadowed the imported
        # ``log`` module in the process); a single pass is equivalent.
        if time_from:
            try:
                _logs = [line for line in _logs
                         if time_from <= self._line_time(line)]
            except ValueError:
                self.error("Time format error")
        if time_to:
            try:
                _logs = [line for line in _logs
                         if time_to >= self._line_time(line)]
            except ValueError:
                self.error("Time format error")
        if event_number:
            _logs = _logs[-event_number:]
        # Lists throughout keep Python 3 slicing working (filter() objects
        # would not be sliceable); list() preserves the documented type.
        return list(_logs)
+
+
+# class SuperExtensionConnector(SuperExtensionDriver):
+#
+# def __init__(self):
+# super(SuperExtensionConnector, self).__init__()
+# # Super_Extension is loaded every time the server is started
+# self.__uuid = uuid4().hex
+# # self.__super_extension = Extension()
+# _policy_abs_dir = os.path.join(CONF.moon.super_extension_directory, 'policy')
+# # self.__super_extension.load_from_json(_policy_abs_dir)
+#
+# def get_super_extensions(self):
+# return None
+#
+# def admin(self, sub, obj, act):
+# # return self.__super_extension.authz(sub, obj, act)
+# return True \ No newline at end of file
diff --git a/keystone-moon/keystone/contrib/moon/backends/sql.py b/keystone-moon/keystone/contrib/moon/backends/sql.py
new file mode 100644
index 00000000..5f76e235
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/backends/sql.py
@@ -0,0 +1,1537 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import six
+from uuid import uuid4
+import copy
+
+from keystone import config
+from oslo_log import log
+from keystone.common import sql
+from keystone import exception
+from keystone.contrib.moon.exception import *
+from oslo_serialization import jsonutils
+from keystone.contrib.moon import IntraExtensionDriver
+from keystone.contrib.moon import TenantDriver
+# from keystone.contrib.moon import InterExtensionDriver
+
+from keystone.contrib.moon.exception import TenantError, TenantListEmptyError
+
+CONF = config.CONF
+LOG = log.getLogger(__name__)
+
+
class IntraExtension(sql.ModelBase, sql.DictBase):
    """SQL model for an intra extension (id, name, model, description)."""

    __tablename__ = 'intra_extension'
    attributes = ['id', 'name', 'model', 'description']
    id = sql.Column(sql.String(64), primary_key=True)
    name = sql.Column(sql.String(64), nullable=False)
    model = sql.Column(sql.String(64), nullable=True)
    description = sql.Column(sql.Text())

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class Subject(sql.ModelBase, sql.DictBase):
    """Per-extension table storing the subject dictionary as a JSON blob."""

    __tablename__ = 'subject'
    attributes = ['id', 'subjects', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    subjects = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class Object(sql.ModelBase, sql.DictBase):
    """Per-extension table storing the object dictionary as a JSON blob."""

    __tablename__ = 'object'
    attributes = ['id', 'objects', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    objects = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class Action(sql.ModelBase, sql.DictBase):
    """Per-extension table storing the action dictionary as a JSON blob."""

    __tablename__ = 'action'
    attributes = ['id', 'actions', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    actions = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class SubjectCategory(sql.ModelBase, sql.DictBase):
    """Per-extension table storing subject categories as a JSON blob."""

    __tablename__ = 'subject_category'
    attributes = ['id', 'subject_categories', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    subject_categories = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class ObjectCategory(sql.ModelBase, sql.DictBase):
    """Per-extension table storing object categories as a JSON blob."""

    __tablename__ = 'object_category'
    attributes = ['id', 'object_categories', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    object_categories = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class ActionCategory(sql.ModelBase, sql.DictBase):
    """Per-extension table storing action categories as a JSON blob."""

    __tablename__ = 'action_category'
    attributes = ['id', 'action_categories', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    action_categories = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class SubjectCategoryScope(sql.ModelBase, sql.DictBase):
    """Per-extension table storing subject category scopes as a JSON blob."""

    __tablename__ = 'subject_category_scope'
    attributes = ['id', 'subject_category_scope', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    subject_category_scope = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class ObjectCategoryScope(sql.ModelBase, sql.DictBase):
    """Per-extension table storing object category scopes as a JSON blob."""

    __tablename__ = 'object_category_scope'
    attributes = ['id', 'object_category_scope', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    object_category_scope = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class ActionCategoryScope(sql.ModelBase, sql.DictBase):
    """Per-extension table storing action category scopes as a JSON blob."""

    __tablename__ = 'action_category_scope'
    attributes = ['id', 'action_category_scope', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    action_category_scope = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class SubjectCategoryAssignment(sql.ModelBase, sql.DictBase):
    """Per-extension table storing subject->category assignments (JSON)."""

    __tablename__ = 'subject_category_assignment'
    attributes = ['id', 'subject_category_assignments', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    subject_category_assignments = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class ObjectCategoryAssignment(sql.ModelBase, sql.DictBase):
    """Per-extension table storing object->category assignments (JSON)."""

    __tablename__ = 'object_category_assignment'
    attributes = ['id', 'object_category_assignments', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    object_category_assignments = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class ActionCategoryAssignment(sql.ModelBase, sql.DictBase):
    """Per-extension table storing action->category assignments (JSON)."""

    __tablename__ = 'action_category_assignment'
    attributes = ['id', 'action_category_assignments', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    action_category_assignments = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class MetaRule(sql.ModelBase, sql.DictBase):
    """Per-extension table storing sub meta rules and their aggregation."""

    __tablename__ = 'metarule'
    attributes = ['id', 'sub_meta_rules', 'aggregation', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    sub_meta_rules = sql.Column(sql.JsonBlob(), nullable=True)
    aggregation = sql.Column(sql.Text(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class Rule(sql.ModelBase, sql.DictBase):
    """Per-extension table storing the rules dictionary as a JSON blob."""

    __tablename__ = 'rule'
    attributes = ['id', 'rules', 'intra_extension_uuid']
    id = sql.Column(sql.String(64), primary_key=True)
    rules = sql.Column(sql.JsonBlob(), nullable=True)
    intra_extension_uuid = sql.Column(sql.ForeignKey("intra_extension.id"), nullable=False)

    @classmethod
    def from_dict(cls, d):
        """Build a row object from a plain dict of column values."""
        new_d = d.copy()
        return cls(**new_d)

    def to_dict(self):
        """Return the row as a plain dictionary."""
        return dict(six.iteritems(self))
+
+
class Tenant(sql.ModelBase, sql.DictBase):
    """SQL model mapping a tenant to its authz and admin intra extensions."""

    __tablename__ = 'tenants'
    attributes = [
        'id', 'name', 'authz', 'admin'
    ]
    id = sql.Column(sql.String(64), primary_key=True, nullable=False)
    name = sql.Column(sql.String(128), nullable=True)
    authz = sql.Column(sql.String(64), nullable=True)
    admin = sql.Column(sql.String(64), nullable=True)

    @classmethod
    def from_dict(cls, d):
        """Override parent from_dict() method with a different implementation.

        ``d`` maps a single tenant UUID to that tenant's attributes, e.g.
        ``{uuid: {'name': ..., 'authz': ..., 'admin': ...}}``.
        """
        new_d = d.copy()
        # next(iter(...)) works on Python 2 and 3; the previous
        # ``new_d.keys()[0]`` raises TypeError on Python 3 where
        # dict.keys() returns a non-indexable view.
        uuid = next(iter(new_d))
        return cls(id=uuid, **new_d[uuid])

    def to_dict(self):
        """Return the tenant row as a plain dictionary."""
        tenant_dict = {}
        for key in ("id", "name", "authz", "admin"):
            tenant_dict[key] = getattr(self, key)
        return tenant_dict
+
# Every per-extension model; delete_intra_extension() walks this tuple to
# purge all rows referencing a removed IntraExtension.
__all_objects__ = (
    Subject,
    Object,
    Action,
    SubjectCategory,
    ObjectCategory,
    ActionCategory,
    SubjectCategoryScope,
    ObjectCategoryScope,
    ActionCategoryScope,
    SubjectCategoryAssignment,
    ObjectCategoryAssignment,
    ActionCategoryAssignment,
    MetaRule,
    Rule,
)
+
+class IntraExtensionConnector(IntraExtensionDriver):
+
+ def get_intra_extension_list(self):
+ with sql.transaction() as session:
+ query = session.query(IntraExtension.id)
+ intraextensions = query.all()
+ # return intraextensions
+ return [intraextension[0] for intraextension in intraextensions]
+
    def set_intra_extension(self, intra_id, intra_extension):
        """Persist a new intra extension row and return it as a dict.

        NOTE(review): ``intra_id`` is never used; the stored id comes from
        ``intra_extension['id']`` — confirm callers always pass a matching id.
        """
        with sql.transaction() as session:
            # intra_extension["admin"] = jsonutils.dumps(intra_extension["admin"])
            # intra_extension["authz"] = jsonutils.dumps(intra_extension["authz"])
            ie_ref = IntraExtension.from_dict(intra_extension)
            session.add(ie_ref)
            return IntraExtension.to_dict(ie_ref)
+
+ def get_intra_extension(self, uuid):
+ with sql.transaction() as session:
+ query = session.query(IntraExtension)
+ query = query.filter_by(id=uuid)
+ ref = query.first()
+ if not ref:
+ raise exception.NotFound
+ return ref.to_dict()
+
    def delete_intra_extension(self, intra_extension_id):
        """Delete an intra extension and every row that references it."""
        with sql.transaction() as session:
            ref = session.query(IntraExtension).get(intra_extension_id)
            # Must delete all references to that IntraExtension
            for _object in __all_objects__:
                query = session.query(_object)
                query = query.filter_by(intra_extension_uuid=intra_extension_id)
                _ref = query.first()
                if _ref:
                    session.delete(_ref)
            # Flush dependent deletions first so the foreign keys pointing
            # at intra_extension.id are released before the parent row goes.
            session.flush()
            session.delete(ref)
+
+ # Getter and setter for name
+
+ def get_name(self, uuid):
+ intra_extension = self.get_intra_extension(uuid)
+ return intra_extension["name"]
+
    def set_name(self, uuid, name):
        # Renaming an intra extension is not supported by this backend.
        raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and setter for model
+
+ def get_model(self, uuid):
+ intra_extension = self.get_intra_extension(uuid)
+ return intra_extension["model"]
+
    def set_model(self, uuid, model):
        # Changing the model of an intra extension is not supported here.
        raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and setter for description
+
+ def get_description(self, uuid):
+ intra_extension = self.get_intra_extension(uuid)
+ return intra_extension["description"]
+
    def set_description(self, uuid, args):
        # Updating the description is not supported by this backend.
        raise exception.NotImplemented() # pragma: no cover
+
+ def get_subject_dict(self, extension_uuid):
+ with sql.transaction() as session:
+ query = session.query(Subject)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ return ref.to_dict()
+
    def set_subject_dict(self, extension_uuid, subject_uuid):
        """Create or replace the subjects row of an intra extension.

        :param subject_uuid: dict mapping subject uuid -> name, stored
            wholesale in the ``subjects`` JSON blob
        :returns: the resulting row as a dict
        """
        with sql.transaction() as session:
            query = session.query(Subject)
            query = query.filter_by(intra_extension_uuid=extension_uuid)
            ref = query.first()
            new_ref = Subject.from_dict(
                {
                    "id": uuid4().hex,
                    'subjects': subject_uuid,
                    'intra_extension_uuid': extension_uuid
                }
            )
            if not ref:
                # No row yet for this extension: insert a fresh one.
                session.add(new_ref)
                ref = new_ref
            else:
                # Update in place, keeping the existing primary key, so the
                # session flushes an UPDATE rather than an INSERT.
                for attr in Subject.attributes:
                    if attr != 'id':
                        setattr(ref, attr, getattr(new_ref, attr))
            return ref.to_dict()
+
    def add_subject(self, extension_uuid, subject_uuid, subject_name):
        """Add one subject to the subjects blob of an intra extension.

        :raises IntraExtensionNotFound: if the extension has no subjects row
        :returns: ``{"subject": {"uuid": ..., "name": ...}}``
        """
        with sql.transaction() as session:
            query = session.query(Subject)
            query = query.filter_by(intra_extension_uuid=extension_uuid)
            ref = query.first()
            if not ref:
                raise IntraExtensionNotFound()
            old_ref = ref.to_dict()
            subjects = dict(old_ref["subjects"])
            subjects[subject_uuid] = subject_name
            new_ref = Subject.from_dict(
                {
                    "id": old_ref["id"],
                    'subjects': subjects,
                    'intra_extension_uuid': old_ref["intra_extension_uuid"]
                }
            )
            # Copy everything but the primary key onto the managed row so
            # the change is flushed as an UPDATE.
            for attr in Subject.attributes:
                if attr != 'id':
                    setattr(ref, attr, getattr(new_ref, attr))
            return {"subject": {"uuid": subject_uuid, "name": subject_name}}
+
    def remove_subject(self, extension_uuid, subject_uuid):
        """Remove one subject from an intra extension's subjects blob.

        A missing ``subject_uuid`` is logged and otherwise ignored.
        Returns None.
        :raises IntraExtensionNotFound: if the extension has no subjects row
        """
        with sql.transaction() as session:
            query = session.query(Subject)
            query = query.filter_by(intra_extension_uuid=extension_uuid)
            ref = query.first()
            if not ref:
                raise IntraExtensionNotFound()
            else:
                old_ref = ref.to_dict()
                subjects = dict(old_ref["subjects"])
                try:
                    subjects.pop(subject_uuid)
                except KeyError:
                    LOG.error("KeyError in remove_subject {} | {}".format(subject_uuid, subjects))
                else:
                    new_ref = Subject.from_dict(
                        {
                            "id": old_ref["id"],
                            'subjects': subjects,
                            'intra_extension_uuid': old_ref["intra_extension_uuid"]
                        }
                    )
                    # Mutate the managed row in place so the session flushes
                    # an UPDATE for the existing primary key.
                    for attr in Subject.attributes:
                        if attr != 'id':
                            setattr(ref, attr, getattr(new_ref, attr))
+
+ def get_object_dict(self, extension_uuid):
+ with sql.transaction() as session:
+ query = session.query(Object)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ return ref.to_dict()
+
    def set_object_dict(self, extension_uuid, object_uuid):
        """Create or replace the objects row of an intra extension.

        :param object_uuid: dict mapping object uuid -> name, stored
            wholesale in the ``objects`` JSON blob
        :returns: the resulting row as a dict
        """
        with sql.transaction() as session:
            query = session.query(Object)
            query = query.filter_by(intra_extension_uuid=extension_uuid)
            ref = query.first()
            new_ref = Object.from_dict(
                {
                    "id": uuid4().hex,
                    'objects': object_uuid,
                    'intra_extension_uuid': extension_uuid
                }
            )
            if not ref:
                # No row yet for this extension: insert a fresh one.
                session.add(new_ref)
                ref = new_ref
            else:
                # Update in place, keeping the existing primary key.
                for attr in Object.attributes:
                    if attr != 'id':
                        setattr(ref, attr, getattr(new_ref, attr))
            return ref.to_dict()
+
    def add_object(self, extension_uuid, object_uuid, object_name):
        """Add one object to the objects blob of an intra extension.

        :raises IntraExtensionNotFound: if the extension has no objects row
        :returns: ``{"object": {"uuid": ..., "name": ...}}``
        """
        with sql.transaction() as session:
            query = session.query(Object)
            query = query.filter_by(intra_extension_uuid=extension_uuid)
            ref = query.first()
            if not ref:
                raise IntraExtensionNotFound()
            old_ref = ref.to_dict()
            objects = dict(old_ref["objects"])
            objects[object_uuid] = object_name
            new_ref = Object.from_dict(
                {
                    "id": old_ref["id"],
                    'objects': objects,
                    'intra_extension_uuid': old_ref["intra_extension_uuid"]
                }
            )
            # Copy everything but the primary key onto the managed row so
            # the change is flushed as an UPDATE.
            for attr in Object.attributes:
                if attr != 'id':
                    setattr(ref, attr, getattr(new_ref, attr))
            return {"object": {"uuid": object_uuid, "name": object_name}}
+
    def remove_object(self, extension_uuid, object_uuid):
        """Remove one object from an intra extension's objects blob.

        A missing ``object_uuid`` is logged and otherwise ignored.
        Returns None.
        :raises IntraExtensionNotFound: if the extension has no objects row
        """
        with sql.transaction() as session:
            query = session.query(Object)
            query = query.filter_by(intra_extension_uuid=extension_uuid)
            ref = query.first()
            if not ref:
                raise IntraExtensionNotFound()
            else:
                old_ref = ref.to_dict()
                objects = dict(old_ref["objects"])
                try:
                    objects.pop(object_uuid)
                except KeyError:
                    LOG.error("KeyError in remove_object {} | {}".format(object_uuid, objects))
                else:
                    new_ref = Object.from_dict(
                        {
                            "id": old_ref["id"],
                            'objects': objects,
                            'intra_extension_uuid': old_ref["intra_extension_uuid"]
                        }
                    )
                    # Mutate the managed row in place (UPDATE, not INSERT).
                    for attr in Object.attributes:
                        if attr != 'id':
                            setattr(ref, attr, getattr(new_ref, attr))
+
+ def get_action_dict(self, extension_uuid):
+ with sql.transaction() as session:
+ query = session.query(Action)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ return ref.to_dict()
+
    def set_action_dict(self, extension_uuid, action_uuid):
        """Create or replace the actions row of an intra extension.

        :param action_uuid: dict mapping action uuid -> name, stored
            wholesale in the ``actions`` JSON blob
        :returns: the resulting row as a dict
        """
        with sql.transaction() as session:
            query = session.query(Action)
            query = query.filter_by(intra_extension_uuid=extension_uuid)
            ref = query.first()
            new_ref = Action.from_dict(
                {
                    "id": uuid4().hex,
                    'actions': action_uuid,
                    'intra_extension_uuid': extension_uuid
                }
            )
            if not ref:
                # No row yet for this extension: insert a fresh one.
                session.add(new_ref)
                ref = new_ref
            else:
                # Update in place, keeping the existing primary key.
                for attr in Action.attributes:
                    if attr != 'id':
                        setattr(ref, attr, getattr(new_ref, attr))
            return ref.to_dict()
+
+ def add_action(self, extension_uuid, action_uuid, action_name):
+ with sql.transaction() as session:
+ query = session.query(Action)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ actions = dict(old_ref["actions"])
+ actions[action_uuid] = action_name
+ new_ref = Action.from_dict(
+ {
+ "id": old_ref["id"],
+ 'actions': actions,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in Action.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return {"action": {"uuid": action_uuid, "name": action_name}}
+
+ def remove_action(self, extension_uuid, action_uuid):
+ with sql.transaction() as session:
+ query = session.query(Action)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ else:
+ old_ref = ref.to_dict()
+ actions = dict(old_ref["actions"])
+ try:
+ actions.pop(action_uuid)
+ except KeyError:
+ LOG.error("KeyError in remove_action {} | {}".format(action_uuid, actions))
+ else:
+ new_ref = Action.from_dict(
+ {
+ "id": old_ref["id"],
+ 'actions': actions,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in Action.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+
+ # Getter and Setter for subject_category
+
+ def get_subject_category_dict(self, extension_uuid):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ return ref.to_dict()
+
+ def set_subject_category_dict(self, extension_uuid, subject_categories):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ new_ref = SubjectCategory.from_dict(
+ {
+ "id": uuid4().hex,
+ 'subject_categories': subject_categories,
+ 'intra_extension_uuid': extension_uuid
+ }
+ )
+ if not ref:
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ for attr in SubjectCategory.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ def add_subject_category_dict(self, extension_uuid, subject_category_uuid, subject_category_name):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ subject_categories = dict(old_ref["subject_categories"])
+ subject_categories[subject_category_uuid] = subject_category_name
+ new_ref = SubjectCategory.from_dict(
+ {
+ "id": old_ref["id"],
+ 'subject_categories': subject_categories,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in SubjectCategory.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return {"subject_category": {"uuid": subject_category_uuid, "name": subject_category_name}}
+
+ def remove_subject_category(self, extension_uuid, subject_category_uuid):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ else:
+ old_ref = ref.to_dict()
+ subject_categories = dict(old_ref["subject_categories"])
+ try:
+ subject_categories.pop(subject_category_uuid)
+ except KeyError:
+ pass
+ else:
+ new_ref = SubjectCategory.from_dict(
+ {
+ "id": old_ref["id"],
+ 'subject_categories': subject_categories,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in SubjectCategory.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ # Getter and Setter for object_category
+
+ def get_object_category_dict(self, extension_uuid):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ return ref.to_dict()
+
+ def set_object_category_dict(self, extension_uuid, object_categories):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ new_ref = ObjectCategory.from_dict(
+ {
+ "id": uuid4().hex,
+ 'object_categories': object_categories,
+ 'intra_extension_uuid': extension_uuid
+ }
+ )
+ if not ref:
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ for attr in ObjectCategory.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ def add_object_category_dict(self, extension_uuid, object_category_uuid, object_category_name):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ object_categories = dict(old_ref["object_categories"])
+ object_categories[object_category_uuid] = object_category_name
+ new_ref = ObjectCategory.from_dict(
+ {
+ "id": old_ref["id"],
+ 'object_categories': object_categories,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in ObjectCategory.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return {"object_category": {"uuid": object_category_uuid, "name": object_category_name}}
+
+ def remove_object_category(self, extension_uuid, object_category_uuid):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ else:
+ old_ref = ref.to_dict()
+ object_categories = dict(old_ref["object_categories"])
+ try:
+ object_categories.pop(object_category_uuid)
+ except KeyError:
+ pass
+ else:
+ new_ref = ObjectCategory.from_dict(
+ {
+ "id": old_ref["id"],
+ 'object_categories': object_categories,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in ObjectCategory.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ # Getter and Setter for action_category
+
+ def get_action_category_dict(self, extension_uuid):
+ with sql.transaction() as session:
+ query = session.query(ActionCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ return ref.to_dict()
+
+ def set_action_category_dict(self, extension_uuid, action_categories):
+ with sql.transaction() as session:
+ query = session.query(ActionCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ new_ref = ActionCategory.from_dict(
+ {
+ "id": uuid4().hex,
+ 'action_categories': action_categories,
+ 'intra_extension_uuid': extension_uuid
+ }
+ )
+ if not ref:
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ for attr in ActionCategory.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ def add_action_category_dict(self, extension_uuid, action_category_uuid, action_category_name):
+ with sql.transaction() as session:
+ query = session.query(ActionCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ action_categories = dict(old_ref["action_categories"])
+ action_categories[action_category_uuid] = action_category_name
+ new_ref = ActionCategory.from_dict(
+ {
+ "id": old_ref["id"],
+ 'action_categories': action_categories,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in ActionCategory.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return {"action_category": {"uuid": action_category_uuid, "name": action_category_name}}
+
+ def remove_action_category(self, extension_uuid, action_category_uuid):
+ with sql.transaction() as session:
+ query = session.query(ActionCategory)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ else:
+ old_ref = ref.to_dict()
+ action_categories = dict(old_ref["action_categories"])
+ try:
+ action_categories.pop(action_category_uuid)
+ except KeyError:
+ pass
+ else:
+ new_ref = ActionCategory.from_dict(
+ {
+ "id": old_ref["id"],
+ 'action_categories': action_categories,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in ActionCategory.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ # Getter and Setter for subject_category_value_scope
+
+ def get_subject_category_scope_dict(self, extension_uuid, subject_category):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ result = copy.deepcopy(ref.to_dict())
+ if subject_category not in result["subject_category_scope"].keys():
+ raise CategoryNotFound()
+ result["subject_category_scope"] = {subject_category: result["subject_category_scope"][subject_category]}
+ return result
+
+ def set_subject_category_scope_dict(self, extension_uuid, subject_category, scope):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ new_ref = SubjectCategoryScope.from_dict(
+ {
+ "id": uuid4().hex,
+ 'subject_category_scope': {subject_category: scope},
+ 'intra_extension_uuid': extension_uuid
+ }
+ )
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ tmp_ref = ref.to_dict()
+ tmp_ref['subject_category_scope'].update({subject_category: scope})
+ session.delete(ref)
+ new_ref = SubjectCategoryScope.from_dict(tmp_ref)
+ session.add(new_ref)
+ return ref.to_dict()
+
+ def add_subject_category_scope_dict(self, extension_uuid, subject_category, scope_uuid, scope_name):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ scope = copy.deepcopy(old_ref["subject_category_scope"])
+ if subject_category not in scope.keys():
+ scope[subject_category] = dict()
+ scope[subject_category][scope_uuid] = scope_name
+ self.set_subject_category_scope_dict(extension_uuid, subject_category, scope[subject_category])
+ return {"subject_category_scope": {"uuid": scope_uuid, "name": scope_name}}
+
+ def remove_subject_category_scope_dict(self, extension_uuid, subject_category, scope_uuid):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ scope = dict(old_ref["subject_category_scope"])
+ if subject_category not in scope:
+ return
+ try:
+ scope[subject_category].pop(scope_uuid)
+ except KeyError:
+ return
+ new_ref = SubjectCategoryScope.from_dict(
+ {
+ "id": old_ref["id"],
+ 'subject_category_scope': scope,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in SubjectCategoryScope.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ # Getter and Setter for object_category_scope
+
+ def get_object_category_scope_dict(self, extension_uuid, object_category):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ result = copy.deepcopy(ref.to_dict())
+ if object_category not in result["object_category_scope"].keys():
+ raise CategoryNotFound()
+ result["object_category_scope"] = {object_category: result["object_category_scope"][object_category]}
+ return result
+
+ def set_object_category_scope_dict(self, extension_uuid, object_category, scope):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ new_ref = ObjectCategoryScope.from_dict(
+ {
+ "id": uuid4().hex,
+ 'object_category_scope': {object_category: scope},
+ 'intra_extension_uuid': extension_uuid
+ }
+ )
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ tmp_ref = ref.to_dict()
+ tmp_ref['object_category_scope'].update({object_category: scope})
+ session.delete(ref)
+ new_ref = ObjectCategoryScope.from_dict(tmp_ref)
+ session.add(new_ref)
+ return ref.to_dict()
+
+ def add_object_category_scope_dict(self, extension_uuid, object_category, scope_uuid, scope_name):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ scope = dict(old_ref["object_category_scope"])
+ if object_category not in scope:
+ scope[object_category] = dict()
+ scope[object_category][scope_uuid] = scope_name
+ self.set_object_category_scope_dict(extension_uuid, object_category, scope[object_category])
+ return {"object_category_scope": {"uuid": scope_uuid, "name": scope_name}}
+
+ def remove_object_category_scope_dict(self, extension_uuid, object_category, scope_uuid):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ scope = dict(old_ref["object_category_scope"])
+ if object_category not in scope:
+ return
+ try:
+ scope[object_category].pop(scope_uuid)
+ except KeyError:
+ return
+ new_ref = ObjectCategoryScope.from_dict(
+ {
+ "id": old_ref["id"],
+ 'object_category_scope': scope,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in ObjectCategoryScope.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ # Getter and Setter for action_category_scope
+
+ def get_action_category_scope_dict(self, extension_uuid, action_category):
+ with sql.transaction() as session:
+ query = session.query(ActionCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ result = copy.deepcopy(ref.to_dict())
+ if action_category not in result["action_category_scope"].keys():
+ raise CategoryNotFound("Unknown category id {}/{}".format(action_category, result["action_category_scope"].keys()))
+ result["action_category_scope"] = {action_category: result["action_category_scope"][action_category]}
+ return result
+
+ def set_action_category_scope_dict(self, extension_uuid, action_category, scope):
+ with sql.transaction() as session:
+ query = session.query(ActionCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ new_ref = ActionCategoryScope.from_dict(
+ {
+ "id": uuid4().hex,
+ 'action_category_scope': {action_category: scope},
+ 'intra_extension_uuid': extension_uuid
+ }
+ )
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ tmp_ref = ref.to_dict()
+ tmp_ref['action_category_scope'].update({action_category: scope})
+ session.delete(ref)
+ new_ref = ActionCategoryScope.from_dict(tmp_ref)
+ session.add(new_ref)
+ return ref.to_dict()
+
+ def add_action_category_scope_dict(self, extension_uuid, action_category, scope_uuid, scope_name):
+ with sql.transaction() as session:
+ query = session.query(ActionCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ scope = dict(old_ref["action_category_scope"])
+ if action_category not in scope:
+ scope[action_category] = dict()
+ scope[action_category][scope_uuid] = scope_name
+ self.set_action_category_scope_dict(extension_uuid, action_category, scope[action_category])
+ return {"action_category_scope": {"uuid": scope_uuid, "name": scope_name}}
+
+ def remove_action_category_scope_dict(self, extension_uuid, action_category, scope_uuid):
+ with sql.transaction() as session:
+ query = session.query(ActionCategoryScope)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ scope = dict(old_ref["action_category_scope"])
+ if action_category not in scope:
+ return
+ try:
+ scope[action_category].pop(scope_uuid)
+ except KeyError:
+ return
+ new_ref = ActionCategoryScope.from_dict(
+ {
+ "id": old_ref["id"],
+ 'action_category_scope': scope,
+ 'intra_extension_uuid': old_ref["intra_extension_uuid"]
+ }
+ )
+ for attr in ActionCategoryScope.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ # Getter and Setter for subject_category_assignment
+
+ def get_subject_category_assignment_dict(self, extension_uuid, subject_uuid):
+ """ From a subject_uuid, return a dictionary of (category: scope for that subject)
+
+ :param extension_uuid: intra extension UUID
+ :param subject_uuid: subject UUID
+ :return: a dictionary of (keys are category nd values are scope for that subject)
+ """
+ with sql.transaction() as session:
+ query = session.query(SubjectCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound("get_subject_category_assignment_dict")
+ _ref = ref.to_dict()
+ if subject_uuid in _ref["subject_category_assignments"]:
+ _backup_dict = _ref["subject_category_assignments"][subject_uuid]
+ _ref["subject_category_assignments"] = dict()
+ _ref["subject_category_assignments"][subject_uuid] = _backup_dict
+ else:
+ _ref["subject_category_assignments"] = dict()
+ _ref["subject_category_assignments"][subject_uuid] = dict()
+ return _ref
+
+ def set_subject_category_assignment_dict(self, extension_uuid, subject_uuid=None, assignment_dict={}):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if type(assignment_dict) is not dict:
+ raise IntraExtensionError()
+ for value in assignment_dict.values():
+ if type(value) is not list:
+ raise IntraExtensionError(str(value))
+ if not subject_uuid:
+ subject_category_assignments = {}
+ else:
+ subject_category_assignments = {subject_uuid: assignment_dict}
+ new_ref = SubjectCategoryAssignment.from_dict(
+ {
+ "id": uuid4().hex,
+ 'subject_category_assignments': subject_category_assignments,
+ 'intra_extension_uuid': extension_uuid
+ }
+ )
+ if not ref:
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ new_ref.subject_category_assignments[subject_uuid] = assignment_dict
+ for attr in SubjectCategoryAssignment.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ def add_subject_category_assignment_dict(self, extension_uuid, subject_uuid, category_uuid, scope_uuid):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ assignments = ref.to_dict()['subject_category_assignments']
+ if subject_uuid not in assignments:
+ assignments[subject_uuid] = dict()
+ if category_uuid not in assignments[subject_uuid]:
+ assignments[subject_uuid][category_uuid] = list()
+ if scope_uuid not in assignments[subject_uuid][category_uuid]:
+ assignments[subject_uuid][category_uuid].append(scope_uuid)
+ return self.set_subject_category_assignment_dict(
+ extension_uuid,
+ subject_uuid,
+ assignments[subject_uuid])
+
+ def remove_subject_category_assignment(self, extension_uuid, subject_uuid, category_uuid, scope_uuid):
+ with sql.transaction() as session:
+ query = session.query(SubjectCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ if subject_uuid in old_ref["subject_category_assignments"]:
+ if category_uuid in old_ref["subject_category_assignments"][subject_uuid]:
+ old_ref["subject_category_assignments"][subject_uuid][category_uuid].remove(scope_uuid)
+ if not old_ref["subject_category_assignments"][subject_uuid][category_uuid]:
+ old_ref["subject_category_assignments"][subject_uuid].pop(category_uuid)
+ if not old_ref["subject_category_assignments"][subject_uuid]:
+ old_ref["subject_category_assignments"].pop(subject_uuid)
+ try:
+ self.set_subject_category_assignment_dict(
+ extension_uuid,
+ subject_uuid,
+ old_ref["subject_category_assignments"][subject_uuid])
+ except KeyError:
+ self.set_subject_category_assignment_dict(
+ extension_uuid,
+ subject_uuid,
+ {})
+
+ # Getter and Setter for object_category_assignment
+
+ def get_object_category_assignment_dict(self, extension_uuid, object_uuid):
+ """ From a object_uuid, return a dictionary of (category: scope for that object)
+
+ :param extension_uuid: intra extension UUID
+ :param object_uuid: object UUID
+ :return: a dictionary of (keys are category nd values are scope for that object)
+ """
+ with sql.transaction() as session:
+ query = session.query(ObjectCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ _ref = ref.to_dict()
+ if object_uuid in _ref["object_category_assignments"]:
+ _backup_dict = _ref["object_category_assignments"][object_uuid]
+ _ref["object_category_assignments"] = dict()
+ _ref["object_category_assignments"][object_uuid] = _backup_dict
+ else:
+ _ref["object_category_assignments"] = dict()
+ _ref["object_category_assignments"][object_uuid] = dict()
+ return _ref
+
+ def set_object_category_assignment_dict(self, extension_uuid, object_uuid=None, assignment_dict={}):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if type(assignment_dict) is not dict:
+ raise IntraExtensionError()
+ for value in assignment_dict.values():
+ if type(value) is not list:
+ raise IntraExtensionError(str(value))
+ new_ref = ObjectCategoryAssignment.from_dict(
+ {
+ "id": uuid4().hex,
+ 'object_category_assignments': {object_uuid: assignment_dict},
+ 'intra_extension_uuid': extension_uuid
+ }
+ )
+ if not ref:
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ new_ref.object_category_assignments[object_uuid] = assignment_dict
+ for attr in ObjectCategoryAssignment.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ def add_object_category_assignment_dict(self, extension_uuid, object_uuid, category_uuid, scope_uuid):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ assignments = ref.to_dict()['object_category_assignments']
+ if object_uuid not in assignments:
+ assignments[object_uuid] = dict()
+ if category_uuid not in assignments[object_uuid]:
+ assignments[object_uuid][category_uuid] = list()
+ if scope_uuid not in assignments[object_uuid][category_uuid]:
+ assignments[object_uuid][category_uuid].append(scope_uuid)
+ return self.set_object_category_assignment_dict(
+ extension_uuid,
+ object_uuid,
+ assignments[object_uuid])
+
+ def remove_object_category_assignment(self, extension_uuid, object_uuid, category_uuid, scope_uuid):
+ with sql.transaction() as session:
+ query = session.query(ObjectCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ if object_uuid in old_ref["object_category_assignments"]:
+ if category_uuid in old_ref["object_category_assignments"][object_uuid]:
+ old_ref["object_category_assignments"][object_uuid][category_uuid].remove(scope_uuid)
+ if not old_ref["object_category_assignments"][object_uuid][category_uuid]:
+ old_ref["object_category_assignments"][object_uuid].pop(category_uuid)
+ if not old_ref["object_category_assignments"][object_uuid]:
+ old_ref["object_category_assignments"].pop(object_uuid)
+ self.set_object_category_assignment_dict(
+ extension_uuid,
+ object_uuid,
+ old_ref["object_category_assignments"][object_uuid])
+
+ # Getter and Setter for action_category_assignment
+
+ def get_action_category_assignment_dict(self, extension_uuid, action_uuid):
+ """ From a action_uuid, return a dictionary of (category: scope for that action)
+
+ :param extension_uuid: intra extension UUID
+ :param action_uuid: action UUID
+ :return: a dictionary of (keys are category nd values are scope for that action)
+ """
+ with sql.transaction() as session:
+ query = session.query(ActionCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ _ref = ref.to_dict()
+ if action_uuid in _ref["action_category_assignments"]:
+ _backup_dict = _ref["action_category_assignments"][action_uuid]
+ _ref["action_category_assignments"] = dict()
+ _ref["action_category_assignments"][action_uuid] = _backup_dict
+ else:
+ _ref["action_category_assignments"] = dict()
+ _ref["action_category_assignments"][action_uuid] = dict()
+ return _ref
+
+ def set_action_category_assignment_dict(self, extension_uuid, action_uuid=None, assignment_dict={}):
+ with sql.transaction() as session:
+ query = session.query(ActionCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if type(assignment_dict) is not dict:
+ raise IntraExtensionError()
+ for value in assignment_dict.values():
+ if type(value) is not list:
+ raise IntraExtensionError(str(value))
+ new_ref = ActionCategoryAssignment.from_dict(
+ {
+ "id": uuid4().hex,
+ 'action_category_assignments': {action_uuid: assignment_dict},
+ 'intra_extension_uuid': extension_uuid
+ }
+ )
+ if not ref:
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ new_ref.action_category_assignments[action_uuid] = assignment_dict
+ for attr in ActionCategoryAssignment.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ def add_action_category_assignment_dict(self, extension_uuid, action_uuid, category_uuid, scope_uuid):
+ with sql.transaction() as session:
+ query = session.query(ActionCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ assignments = ref.to_dict()['action_category_assignments']
+ if action_uuid not in assignments:
+ assignments[action_uuid] = dict()
+ if category_uuid not in assignments[action_uuid]:
+ assignments[action_uuid][category_uuid] = list()
+ if scope_uuid not in assignments[action_uuid][category_uuid]:
+ assignments[action_uuid][category_uuid].append(scope_uuid)
+ return self.set_action_category_assignment_dict(
+ extension_uuid,
+ action_uuid,
+ assignments[action_uuid])
+
+ def remove_action_category_assignment(self, extension_uuid, action_uuid, category_uuid, scope_uuid):
+ with sql.transaction() as session:
+ query = session.query(ActionCategoryAssignment)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ old_ref = ref.to_dict()
+ if action_uuid in old_ref["action_category_assignments"]:
+ if category_uuid in old_ref["action_category_assignments"][action_uuid]:
+ old_ref["action_category_assignments"][action_uuid][category_uuid].remove(scope_uuid)
+ if not old_ref["action_category_assignments"][action_uuid][category_uuid]:
+ old_ref["action_category_assignments"][action_uuid].pop(category_uuid)
+ if not old_ref["action_category_assignments"][action_uuid]:
+ old_ref["action_category_assignments"].pop(action_uuid)
+ self.set_action_category_assignment_dict(
+ extension_uuid,
+ action_uuid,
+ old_ref["action_category_assignments"][action_uuid])
+
+ # Getter and Setter for meta_rule
+
+ def get_meta_rule_dict(self, extension_uuid):
+ with sql.transaction() as session:
+ query = session.query(MetaRule)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ return ref.to_dict()
+
+ def set_meta_rule_dict(self, extension_uuid, meta_rule):
+ with sql.transaction() as session:
+ query = session.query(MetaRule)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ meta_rule["id"] = uuid4().hex
+ meta_rule["intra_extension_uuid"] = extension_uuid
+ new_ref = MetaRule.from_dict(meta_rule)
+ if not ref:
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ for attr in MetaRule.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+ # Getter and Setter for rules
+
+ def get_rules(self, extension_uuid):
+ with sql.transaction() as session:
+ query = session.query(Rule)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ if not ref:
+ raise IntraExtensionNotFound()
+ return ref.to_dict()
+
+ def set_rules(self, extension_uuid, subrules):
+ with sql.transaction() as session:
+ query = session.query(Rule)
+ query = query.filter_by(intra_extension_uuid=extension_uuid)
+ ref = query.first()
+ rules = dict()
+ rules["id"] = uuid4().hex
+ rules["intra_extension_uuid"] = extension_uuid
+ rules["rules"] = subrules
+ new_ref = Rule.from_dict(rules)
+ if not ref:
+ session.add(new_ref)
+ ref = new_ref
+ else:
+ for attr in Rule.attributes:
+ if attr != 'id':
+ setattr(ref, attr, getattr(new_ref, attr))
+ return ref.to_dict()
+
+
class TenantConnector(TenantDriver):
    """SQL driver for the tenant <-> intra-extension mapping."""

    def get_tenant_dict(self):
        """Return every stored tenant mapping, keyed by tenant id.

        :raises TenantListEmptyError: when the table is empty
        """
        with sql.transaction() as session:
            tenants = session.query(Tenant).all()
            if not tenants:
                raise TenantListEmptyError()
            return {tenant.id: Tenant.to_dict(tenant) for tenant in tenants}

    def set_tenant_dict(self, tenant):
        """Create, update or delete the mapping of a single tenant.

        :param tenant: {tenant_uuid: {"name", "authz", "admin"}} with exactly
            one entry
        :return: the stored mapping as a dict, or None when it was deleted
        :raises TenantError: defensive; the elif chain covers all cases
        """
        with sql.transaction() as session:
            # Bug fix: dict.keys() returns a view in Python 3 and does not
            # support indexing; next(iter(...)) works on Python 2 and 3.
            uuid = next(iter(tenant))
            query = session.query(Tenant)
            query = query.filter_by(id=uuid)
            ref = query.first()
            if not ref:
                # No mapping yet: create the database line.
                ref = Tenant.from_dict(tenant)
                session.add(ref)
                return Tenant.to_dict(ref)
            elif not tenant[uuid]["authz"] and not tenant[uuid]["admin"]:
                # If admin and authz extensions are not set, delete the mapping.
                session.delete(ref)
                return
            elif tenant[uuid]["authz"] or tenant[uuid]["admin"]:
                new_tenant = Tenant(
                    id=uuid,
                    name=tenant[uuid]["name"],
                    authz=tenant[uuid]["authz"],
                    admin=tenant[uuid]["admin"],
                )
                for attr in Tenant.attributes:
                    if attr != 'id':
                        setattr(ref, attr, getattr(new_tenant, attr))
                return Tenant.to_dict(ref)
            raise TenantError()
+
+
+# class InterExtension(sql.ModelBase, sql.DictBase):
+# __tablename__ = 'inter_extension'
+# attributes = [
+# 'id',
+# 'requesting_intra_extension_uuid',
+# 'requested_intra_extension_uuid',
+# 'virtual_entity_uuid',
+# 'genre',
+# 'description',
+# ]
+# id = sql.Column(sql.String(64), primary_key=True)
+# requesting_intra_extension_uuid = sql.Column(sql.String(64))
+# requested_intra_extension_uuid = sql.Column(sql.String(64))
+# virtual_entity_uuid = sql.Column(sql.String(64))
+# genre = sql.Column(sql.String(64))
+# description = sql.Column(sql.Text())
+#
+# @classmethod
+# def from_dict(cls, d):
+# """Override parent from_dict() method with a simpler implementation.
+# """
+# new_d = d.copy()
+# return cls(**new_d)
+#
+# def to_dict(self):
+# """Override parent to_dict() method with a simpler implementation.
+# """
+# return dict(six.iteritems(self))
+#
+#
+# class InterExtensionConnector(InterExtensionDriver):
+#
+# def get_inter_extensions(self):
+# with sql.transaction() as session:
+# query = session.query(InterExtension.id)
+# interextensions = query.all()
+# return [interextension.id for interextension in interextensions]
+#
+# def create_inter_extensions(self, inter_id, inter_extension):
+# with sql.transaction() as session:
+# ie_ref = InterExtension.from_dict(inter_extension)
+# session.add(ie_ref)
+# return InterExtension.to_dict(ie_ref)
+#
+# def get_inter_extension(self, uuid):
+# with sql.transaction() as session:
+# query = session.query(InterExtension)
+# query = query.filter_by(id=uuid)
+# ref = query.first()
+# if not ref:
+# raise exception.NotFound
+# return ref.to_dict()
+#
+# def delete_inter_extensions(self, inter_extension_id):
+# with sql.transaction() as session:
+# ref = session.query(InterExtension).get(inter_extension_id)
+# session.delete(ref)
+
diff --git a/keystone-moon/keystone/contrib/moon/controllers.py b/keystone-moon/keystone/contrib/moon/controllers.py
new file mode 100644
index 00000000..3c87da45
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/controllers.py
@@ -0,0 +1,611 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+from keystone.common import controller
+from keystone.common import dependency
+from keystone import config
+from keystone.models import token_model
+from keystone import exception
+import os
+import glob
+from oslo_log import log
+
+CONF = config.CONF
+LOG = log.getLogger(__name__)
+
+
+@dependency.requires('authz_api')
+class Authz_v3(controller.V3Controller):
+
+ def __init__(self):
+ super(Authz_v3, self).__init__()
+
+ @controller.protected()
+ def get_authz(self, context, tenant_id, subject_id, object_id, action_id):
+ # TODO (dthom): build the authz functionality
+ try:
+ _authz = self.authz_api.authz(tenant_id, subject_id, object_id, action_id)
+ except exception.NotFound:
+ _authz = True
+ except:
+ _authz = False
+ return {"authz": _authz,
+ "tenant_id": tenant_id,
+ "subject_id": subject_id,
+ "object_id": object_id,
+ "action_id": action_id}
+
+
+@dependency.requires('admin_api', 'authz_api')
+class IntraExtensions(controller.V3Controller):
+    # CRUD controller for Moon IntraExtensions and their nested collections
+    # (perimeter, metadata, scopes, assignments, meta-rules, rules).
+    collection_name = 'intra_extensions'
+    member_name = 'intra_extension'
+
+    def __init__(self):
+        super(IntraExtensions, self).__init__()
+
+    def _get_user_from_token(self, token_id):
+        # Resolve the authenticated user dict from a raw token id.
+        # NOTE(review): relies on self.token_provider_api, which is not in the
+        # @dependency.requires list above — presumably injected globally by the
+        # dependency framework; confirm before relying on it.
+        response = self.token_provider_api.validate_token(token_id)
+        token_ref = token_model.KeystoneToken(token_id=token_id, token_data=response)
+        return token_ref['user']
+
+    # IntraExtension functions
+    # NOTE(review): every handler below binds `user` even when it is unused;
+    # kept as-is (comments only).
+    @controller.protected()
+    def get_intra_extensions(self, context, **kw):
+        # List the UUIDs of all known IntraExtensions.
+        user = self._get_user_from_token(context["token_id"])
+        return {
+            "intra_extensions":
+                self.admin_api.get_intra_extension_list()
+        }
+
+    @controller.protected()
+    def get_intra_extension(self, context, **kw):
+        # Fetch one IntraExtension by UUID.
+        # NOTE(review): returns the plural key "intra_extensions" for a single
+        # item — looks like a copy-paste; clients may depend on it, so it is
+        # only flagged here, not changed.
+        user = self._get_user_from_token(context["token_id"])
+        return {
+            "intra_extensions":
+                self.admin_api.get_intra_extension(uuid=kw['intra_extensions_id'])
+        }
+
+    @controller.protected()
+    def create_intra_extension(self, context, **kw):
+        # Create (load) a new IntraExtension from the request body.
+        user = self._get_user_from_token(context["token_id"])
+        return self.admin_api.load_intra_extension(kw)
+
+    @controller.protected()
+    def delete_intra_extension(self, context, **kw):
+        # Delete an IntraExtension; requires "intra_extensions_id".
+        user = self._get_user_from_token(context["token_id"])
+        if "intra_extensions_id" not in kw:
+            # NOTE(review): raises the exception *class*, not an instance.
+            raise exception.Error
+        return self.admin_api.delete_intra_extension(kw["intra_extensions_id"])
+
+    # Perimeter functions
+    # Get/add/delete the subjects, objects and actions of an IntraExtension.
+    # All handlers delegate to admin_api and raise KeyError if a required
+    # kwarg ("intra_extensions_id", "<kind>_id") is missing.
+    @controller.protected()
+    def get_subjects(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_subject_dict(user, ie_uuid)
+
+    @controller.protected()
+    def add_subject(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        subject = kw["subject_id"]
+        return self.admin_api.add_subject_dict(user, ie_uuid, subject)
+
+    @controller.protected()
+    def del_subject(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        subject = kw["subject_id"]
+        return self.admin_api.del_subject(user, ie_uuid, subject)
+
+    @controller.protected()
+    def get_objects(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_object_dict(user, ie_uuid)
+
+    @controller.protected()
+    def add_object(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        object_id = kw["object_id"]
+        return self.admin_api.add_object_dict(user, ie_uuid, object_id)
+
+    @controller.protected()
+    def del_object(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        object_id = kw["object_id"]
+        return self.admin_api.del_object(user, ie_uuid, object_id)
+
+    @controller.protected()
+    def get_actions(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_action_dict(user, ie_uuid)
+
+    @controller.protected()
+    def add_action(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        action = kw["action_id"]
+        return self.admin_api.add_action_dict(user, ie_uuid, action)
+
+    @controller.protected()
+    def del_action(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        action = kw["action_id"]
+        return self.admin_api.del_action(user, ie_uuid, action)
+
+    # Metadata functions
+    # Get/add/delete the subject/object/action *categories* of an
+    # IntraExtension. Same delegation pattern as the perimeter functions.
+    @controller.protected()
+    def get_subject_categories(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_subject_category_dict(user, ie_uuid)
+
+    @controller.protected()
+    def add_subject_category(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        subject_category = kw["subject_category_id"]
+        return self.admin_api.add_subject_category_dict(user, ie_uuid, subject_category)
+
+    @controller.protected()
+    def del_subject_category(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        subject_category = kw["subject_category_id"]
+        return self.admin_api.del_subject_category(user, ie_uuid, subject_category)
+
+    @controller.protected()
+    def get_object_categories(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_object_category_dict(user, ie_uuid)
+
+    @controller.protected()
+    def add_object_category(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        object_category = kw["object_category_id"]
+        return self.admin_api.add_object_category_dict(user, ie_uuid, object_category)
+
+    @controller.protected()
+    def del_object_category(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        object_category = kw["object_category_id"]
+        return self.admin_api.del_object_category(user, ie_uuid, object_category)
+
+    @controller.protected()
+    def get_action_categories(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_action_category_dict(user, ie_uuid)
+
+    @controller.protected()
+    def add_action_category(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        action_category = kw["action_category_id"]
+        return self.admin_api.add_action_category_dict(user, ie_uuid, action_category)
+
+    @controller.protected()
+    def del_action_category(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        action_category = kw["action_category_id"]
+        return self.admin_api.del_action_category(user, ie_uuid, action_category)
+
+    # Scope functions
+    # Get/add/delete the allowed *scope values* of a given category
+    # (e.g. the levels a subject category can take).
+    @controller.protected()
+    def get_subject_category_scope(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        category_id = kw["subject_category_id"]
+        return self.admin_api.get_subject_category_scope_dict(user, ie_uuid, category_id)
+
+    @controller.protected()
+    def add_subject_category_scope(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        subject_category = kw["subject_category_id"]
+        subject_category_scope = kw["subject_category_scope_id"]
+        return self.admin_api.add_subject_category_scope_dict(
+            user,
+            ie_uuid,
+            subject_category,
+            subject_category_scope)
+
+    @controller.protected()
+    def del_subject_category_scope(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        subject_category = kw["subject_category_id"]
+        subject_category_scope = kw["subject_category_scope_id"]
+        return self.admin_api.del_subject_category_scope(
+            user,
+            ie_uuid,
+            subject_category,
+            subject_category_scope)
+
+    @controller.protected()
+    def get_object_category_scope(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        category_id = kw["object_category_id"]
+        return self.admin_api.get_object_category_scope_dict(user, ie_uuid, category_id)
+
+    @controller.protected()
+    def add_object_category_scope(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        object_category = kw["object_category_id"]
+        object_category_scope = kw["object_category_scope_id"]
+        return self.admin_api.add_object_category_scope_dict(
+            user,
+            ie_uuid,
+            object_category,
+            object_category_scope)
+
+    @controller.protected()
+    def del_object_category_scope(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        object_category = kw["object_category_id"]
+        object_category_scope = kw["object_category_scope_id"]
+        return self.admin_api.del_object_category_scope(
+            user,
+            ie_uuid,
+            object_category,
+            object_category_scope)
+
+    @controller.protected()
+    def get_action_category_scope(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        category_id = kw["action_category_id"]
+        return self.admin_api.get_action_category_scope_dict(user, ie_uuid, category_id)
+
+    @controller.protected()
+    def add_action_category_scope(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        action_category = kw["action_category_id"]
+        action_category_scope = kw["action_category_scope_id"]
+        return self.admin_api.add_action_category_scope_dict(
+            user,
+            ie_uuid,
+            action_category,
+            action_category_scope)
+
+    @controller.protected()
+    def del_action_category_scope(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        action_category = kw["action_category_id"]
+        action_category_scope = kw["action_category_scope_id"]
+        return self.admin_api.del_action_category_scope(
+            user,
+            ie_uuid,
+            action_category,
+            action_category_scope)
+
+    # Assignment functions
+    # Manage the (entity -> category -> scope) assignments for subjects,
+    # objects and actions. NOTE(review): get_* handlers take the entity id
+    # via "<kind>_id" kwargs while add/del take category/scope via
+    # "<kind>_category" / "<kind>_category_scope" (no "_id" suffix).
+    @controller.protected()
+    def get_subject_assignments(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        subject_id = kw["subject_id"]
+        return self.admin_api.get_subject_category_assignment_dict(user, ie_uuid, subject_id)
+
+    @controller.protected()
+    def add_subject_assignment(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        subject_id = kw["subject_id"]
+        subject_category = kw["subject_category"]
+        subject_category_scope = kw["subject_category_scope"]
+        return self.admin_api.add_subject_category_assignment_dict(
+            user,
+            ie_uuid,
+            subject_id,
+            subject_category,
+            subject_category_scope)
+
+    @controller.protected()
+    def del_subject_assignment(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        subject_id = kw["subject_id"]
+        subject_category = kw["subject_category"]
+        subject_category_scope = kw["subject_category_scope"]
+        return self.admin_api.del_subject_category_assignment(
+            user,
+            ie_uuid,
+            subject_id,
+            subject_category,
+            subject_category_scope)
+
+    @controller.protected()
+    def get_object_assignments(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        object_id = kw["object_id"]
+        return self.admin_api.get_object_category_assignment_dict(user, ie_uuid, object_id)
+
+    @controller.protected()
+    def add_object_assignment(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        object_id = kw["object_id"]
+        object_category = kw["object_category"]
+        object_category_scope = kw["object_category_scope"]
+        return self.admin_api.add_object_category_assignment_dict(
+            user,
+            ie_uuid,
+            object_id,
+            object_category,
+            object_category_scope)
+
+    @controller.protected()
+    def del_object_assignment(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        object_id = kw["object_id"]
+        object_category = kw["object_category"]
+        object_category_scope = kw["object_category_scope"]
+        return self.admin_api.del_object_category_assignment(
+            user,
+            ie_uuid,
+            object_id,
+            object_category,
+            object_category_scope)
+
+    @controller.protected()
+    def get_action_assignments(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        action_id = kw["action_id"]
+        return self.admin_api.get_action_category_assignment_dict(user, ie_uuid, action_id)
+
+    @controller.protected()
+    def add_action_assignment(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        action_id = kw["action_id"]
+        action_category = kw["action_category"]
+        action_category_scope = kw["action_category_scope"]
+        return self.admin_api.add_action_category_assignment_dict(
+            user,
+            ie_uuid,
+            action_id,
+            action_category,
+            action_category_scope)
+
+ @controller.protected()
+ def del_action_assignment(self, context, **kw):
+ user = self._get_user_from_token(context["token_id"])
+ ie_uuid = kw["intra_extensions_id"]
+ action_id = kw["action_id"]
+ action_category = kw["action_category"]
+ action_category_scope = kw["action_category_scope"]
+ return self.admin_api.del_object_category_assignment(
+ user,
+ ie_uuid,
+ action_id,
+ action_category,
+ action_category_scope)
+
+    # Metarule functions
+    # Aggregation algorithm and sub-meta-rule configuration of an
+    # IntraExtension.
+    @controller.protected()
+    def get_aggregation_algorithms(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_aggregation_algorithms(user, ie_uuid)
+
+    @controller.protected()
+    def get_aggregation_algorithm(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_aggregation_algorithm(user, ie_uuid)
+
+    @controller.protected()
+    def set_aggregation_algorithm(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        aggregation_algorithm = kw["aggregation_algorithm"]
+        return self.admin_api.set_aggregation_algorithm(user, ie_uuid, aggregation_algorithm)
+
+    @controller.protected()
+    def get_sub_meta_rule(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_sub_meta_rule(user, ie_uuid)
+
+    @controller.protected()
+    def set_sub_meta_rule(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        # pop() removes the id so the remaining kwargs form the rule payload
+        # forwarded verbatim to admin_api.
+        ie_uuid = kw.pop("intra_extensions_id")
+        # subject_categories = kw["subject_categories"]
+        # action_categories = kw["action_categories"]
+        # object_categories = kw["object_categories"]
+        # relation = kw["relation"]
+        # aggregation_algorithm = kw["aggregation_algorithm"]
+        return self.admin_api.set_sub_meta_rule(
+            user,
+            ie_uuid,
+            kw)
+
+    @controller.protected()
+    def get_sub_meta_rule_relations(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_sub_meta_rule_relations(user, ie_uuid)
+
+    # Rules functions
+    # Concrete sub-rules instantiating the meta-rule relations above.
+    @controller.protected()
+    def get_sub_rules(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        return self.admin_api.get_sub_rules(user, ie_uuid)
+
+    @controller.protected()
+    def set_sub_rule(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        sub_rule = kw["rule"]
+        relation = kw["relation"]
+        return self.admin_api.set_sub_rule(user, ie_uuid, relation, sub_rule)
+
+    @controller.protected()
+    def del_sub_rule(self, context, **kw):
+        user = self._get_user_from_token(context["token_id"])
+        ie_uuid = kw["intra_extensions_id"]
+        # NOTE(review): delete takes "relation_name" while set takes
+        # "relation" — asymmetric API, flagged only.
+        relation_name = kw["relation_name"]
+        rule = kw["rule"]
+        return self.admin_api.del_sub_rule(
+            user,
+            ie_uuid,
+            relation_name,
+            rule)
+
+
+class AuthzPolicies(controller.V3Controller):
+    # Read-only listing of the policy templates available on local disk.
+    collection_name = 'authz_policies'
+    member_name = 'authz_policy'
+
+    def __init__(self):
+        super(AuthzPolicies, self).__init__()
+
+    @controller.protected()
+    def get_authz_policies(self, context, **kw):
+        # A policy template is any sub-directory of CONF.moon.policy_directory
+        # (default /etc/keystone/policies); plain files are ignored.
+        nodes = glob.glob(os.path.join(CONF.moon.policy_directory, "*"))
+        return {
+            "authz_policies":
+                [os.path.basename(n) for n in nodes if os.path.isdir(n)]
+        }
+
+
+@dependency.requires('tenant_api', 'resource_api')
+class Tenants(controller.V3Controller):
+    # Maps Keystone projects (tenants) to their Moon authz/admin
+    # IntraExtensions.
+
+    def __init__(self):
+        super(Tenants, self).__init__()
+
+    def _get_user_from_token(self, token_id):
+        # Resolve the authenticated user dict from a raw token id.
+        response = self.token_provider_api.validate_token(token_id)
+        token_ref = token_model.KeystoneToken(token_id=token_id, token_data=response)
+        return token_ref['user']
+
+    @controller.protected()
+    def get_tenants(self, context, **kw):
+        # user = self._get_user_from_token(context["token_id"])
+        return {
+            "tenants":
+                self.tenant_api.get_tenant_dict()
+        }
+
+    @controller.protected()
+    def get_tenant(self, context, **kw):
+        # user = self._get_user_from_token(context["token_id"])
+        # NOTE(review): raises an unhandled KeyError for an unknown
+        # tenant_uuid instead of a 404-style NotFound.
+        tenant_uuid = kw.get("tenant_uuid")
+        return {
+            "tenant":
+                self.tenant_api.get_tenant_dict()[tenant_uuid]
+        }
+
+    @controller.protected()
+    def set_tenant(self, context, **kw):
+        # user = self._get_user_from_token(context["token_id"])
+        # The tenant name is taken from Keystone's project record, not from
+        # the request body.
+        tenant_uuid = kw.get("id")
+        name = self.resource_api.get_project(tenant_uuid)["name"]
+        authz = kw.get("authz")
+        admin = kw.get("admin")
+        self.tenant_api.set_tenant_dict(tenant_uuid, name, authz, admin)
+        return {
+            "tenant":
+                self.tenant_api.get_tenant_dict()[tenant_uuid]
+        }
+
+    @controller.protected()
+    def delete_tenant(self, context, **kw):
+        # user = self._get_user_from_token(context["token_id"])
+        # "Delete" here means blanking the mapping (all fields set to None),
+        # not removing the Keystone project itself.
+        tenant_uuid = kw.get("tenant_uuid")
+        self.tenant_api.set_tenant_dict(tenant_uuid, None, None, None)
+
+
+@dependency.requires('authz_api')
+class InterExtensions(controller.V3Controller):
+    # Placeholder controller: every InterExtension endpoint below is
+    # currently commented out (the backing interextension_api is not wired
+    # up yet).
+
+    def __init__(self):
+        super(InterExtensions, self).__init__()
+
+    def _get_user_from_token(self, token_id):
+        # Resolve the authenticated user dict from a raw token id.
+        response = self.token_provider_api.validate_token(token_id)
+        token_ref = token_model.KeystoneToken(token_id=token_id, token_data=response)
+        return token_ref['user']
+
+    # @controller.protected()
+    # def get_inter_extensions(self, context, **kw):
+    #     user = self._get_user_from_token(context["token_id"])
+    #     return {
+    #         "inter_extensions":
+    #             self.interextension_api.get_inter_extensions()
+    #     }
+
+    # @controller.protected()
+    # def get_inter_extension(self, context, **kw):
+    #     user = self._get_user_from_token(context["token_id"])
+    #     return {
+    #         "inter_extensions":
+    #             self.interextension_api.get_inter_extension(uuid=kw['inter_extensions_id'])
+    #     }
+
+    # @controller.protected()
+    # def create_inter_extension(self, context, **kw):
+    #     user = self._get_user_from_token(context["token_id"])
+    #     return self.interextension_api.create_inter_extension(kw)
+
+    # @controller.protected()
+    # def delete_inter_extension(self, context, **kw):
+    #     user = self._get_user_from_token(context["token_id"])
+    #     if "inter_extensions_id" not in kw:
+    #         raise exception.Error
+    #     return self.interextension_api.delete_inter_extension(kw["inter_extensions_id"])
+
+
+@dependency.requires('authz_api')
+class SuperExtensions(controller.V3Controller):
+    # Empty placeholder controller for the (not yet implemented)
+    # SuperExtension API.
+
+    def __init__(self):
+        super(SuperExtensions, self).__init__()
+
+
+@dependency.requires('moonlog_api', 'authz_api')
+class Logs(controller.V3Controller):
+    # Exposes the Moon log store (moonlog_api) over the API.
+
+    def __init__(self):
+        super(Logs, self).__init__()
+
+    def _get_user_from_token(self, token_id):
+        # Resolve the authenticated user dict from a raw token id.
+        response = self.token_provider_api.validate_token(token_id)
+        token_ref = token_model.KeystoneToken(token_id=token_id, token_data=response)
+        return token_ref['user']
+
+    @controller.protected()
+    def get_logs(self, context, **kw):
+        # "options" is a driver-specific filter string; empty means all logs.
+        user = self._get_user_from_token(context["token_id"])
+        options = kw.get("options", "")
+        # FIXME (dthom): the authorization for get_logs must be done with an intra_extension
+        #if self.authz_api.admin(user["name"], "logs", "read"):
+        return {
+            "logs":
+                self.moonlog_api.get_logs(options)
+        }
+
diff --git a/keystone-moon/keystone/contrib/moon/core.py b/keystone-moon/keystone/contrib/moon/core.py
new file mode 100644
index 00000000..1dc23c4a
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/core.py
@@ -0,0 +1,2375 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+from uuid import uuid4
+import os
+import json
+import copy
+import re
+import six
+
+from keystone.common import manager
+from keystone import config
+from oslo_log import log
+from keystone.common import dependency
+from keystone import exception
+from oslo_config import cfg
+from keystone.i18n import _
+
+from keystone.contrib.moon.exception import *
+
+CONF = config.CONF
+LOG = log.getLogger(__name__)
+
+_OPTS = [
+ cfg.StrOpt('authz_driver',
+ default='keystone.contrib.moon.backends.flat.SuperExtensionConnector',
+ help='Authorisation backend driver.'),
+ cfg.StrOpt('log_driver',
+ default='keystone.contrib.moon.backends.flat.LogConnector',
+ help='Logs backend driver.'),
+ cfg.StrOpt('superextension_driver',
+ default='keystone.contrib.moon.backends.flat.SuperExtensionConnector',
+ help='SuperExtension backend driver.'),
+ cfg.StrOpt('intraextension_driver',
+ default='keystone.contrib.moon.backends.sql.IntraExtensionConnector',
+ help='IntraExtension backend driver.'),
+ cfg.StrOpt('tenant_driver',
+ default='keystone.contrib.moon.backends.sql.TenantConnector',
+ help='Tenant backend driver.'),
+ cfg.StrOpt('interextension_driver',
+ default='keystone.contrib.moon.backends.sql.InterExtensionConnector',
+ help='InterExtension backend driver.'),
+ cfg.StrOpt('policy_directory',
+ default='/etc/keystone/policies',
+ help='Local directory where all policies are stored.'),
+ cfg.StrOpt('super_extension_directory',
+ default='/etc/keystone/super_extension',
+ help='Local directory where SuperExtension configuration is stored.'),
+]
+CONF.register_opts(_OPTS, group='moon')
+
+
+def filter_args(func):
+ def wrapped(*args, **kwargs):
+ _args = []
+ for arg in args:
+ if type(arg) in (unicode, str):
+ arg = "".join(re.findall("[\w\-+]*", arg))
+ _args.append(arg)
+ for arg in kwargs:
+ if type(kwargs[arg]) in (unicode, str):
+ kwargs[arg] = "".join(re.findall("[\w\-+]*", kwargs[arg]))
+ return func(*_args, **kwargs)
+ return wrapped
+
+
+def enforce(actions, object, **extra):
+ def wrap(func):
+ def wrapped(*args):
+ global actions
+ self = args[0]
+ user_name = args[1]
+ intra_extension_uuid = args[2]
+ _admin_extension_uuid = self.tenant_api.get_admin_extension_uuid(args[2])
+ # func.func_globals["_admin_extension_uuid"] = _admin_extension_uuid
+ if not _admin_extension_uuid:
+ args[0].moonlog_api.warning("No admin IntraExtension found, authorization granted by default.")
+ return func(*args)
+ else:
+ _authz = False
+ if type(actions) in (str, unicode):
+ actions = (actions, )
+ for action in actions:
+ if self.authz_api.authz(
+ intra_extension_uuid,
+ user_name,
+ object,
+ action):
+ _authz = True
+ else:
+ _authz = False
+ break
+ if _authz:
+ return func(*args)
+ return wrapped
+ return wrap
+
+
+def filter_input(data):
+ if type(data) not in (str, unicode):
+ return data
+ try:
+ return "".join(re.findall("[\w\-+*]", data))
+ except TypeError:
+ LOG.error("Error in filtering input data: {}".format(data))
+
+
+@dependency.provider('moonlog_api')
+class LogManager(manager.Manager):
+    """Thin facade over the configured Moon log driver (CONF.moon.log_driver).
+
+    Each method delegates one-to-one to the driver; authz() records an
+    authorization-decision message, the others mirror standard log levels.
+    """
+
+    def __init__(self):
+        driver = CONF.moon.log_driver
+        super(LogManager, self).__init__(driver)
+
+    def get_logs(self, options):
+        # "options" is a driver-specific filter string.
+        return self.driver.get_logs(options)
+
+    def authz(self, message):
+        return self.driver.authz(message)
+
+    def debug(self, message):
+        return self.driver.debug(message)
+
+    def info(self, message):
+        return self.driver.info(message)
+
+    def warning(self, message):
+        return self.driver.warning(message)
+
+    def error(self, message):
+        return self.driver.error(message)
+
+    def critical(self, message):
+        return self.driver.critical(message)
+
+
+@dependency.provider('tenant_api')
+@dependency.requires('moonlog_api')
+class TenantManager(manager.Manager):
+    """Manage the tenant -> {name, authz, admin} IntraExtension mapping."""
+
+    def __init__(self):
+        super(TenantManager, self).__init__(CONF.moon.tenant_driver)
+
+    def get_tenant_dict(self):
+        """
+        Return a dictionary with all tenants.
+        :return: dict ({} when the mapping list is empty; the error is logged)
+        """
+        try:
+            return self.driver.get_tenant_dict()
+        except TenantListEmptyError:
+            self.moonlog_api.error(_("Tenant Mapping list is empty."))
+            return {}
+
+    def get_tenant_name(self, tenant_uuid):
+        # Raises TenantNotFoundError for an unknown UUID.
+        _tenant_dict = self.get_tenant_dict()
+        if tenant_uuid not in _tenant_dict:
+            raise TenantNotFoundError(_("Tenant UUID ({}) was not found.".format(tenant_uuid)))
+        return _tenant_dict[tenant_uuid]["name"]
+
+    def set_tenant_name(self, tenant_uuid, tenant_name):
+        # Rename an existing tenant entry; whole mapping is written back.
+        _tenant_dict = self.get_tenant_dict()
+        if tenant_uuid not in _tenant_dict:
+            raise TenantNotFoundError(_("Tenant UUID ({}) was not found.".format(tenant_uuid)))
+        _tenant_dict[tenant_uuid]['name'] = tenant_name
+        return self.driver.set_tenant_dict(_tenant_dict)
+
+    def get_extension_uuid(self, tenant_uuid, scope="authz"):
+        """
+        Return the UUID of the scoped extension for a particular tenant.
+        :param tenant_uuid: UUID of the tenant
+        :param scope: "admin" or "authz"
+        :return (str): the UUID of the scoped extension
+        :raises TenantNotFoundError: unknown tenant
+        :raises IntraExtensionNotFound: tenant has no extension for *scope*
+        """
+        # 1 tenant only with 1 authz extension and 1 admin extension
+        _tenant_dict = self.get_tenant_dict()
+        if tenant_uuid not in _tenant_dict:
+            raise TenantNotFoundError(_("Tenant UUID ({}) was not found.".format(tenant_uuid)))
+        if not _tenant_dict[tenant_uuid][scope]:
+            raise IntraExtensionNotFound(_("No IntraExtension found for Tenant {}.".format(tenant_uuid)))
+        return _tenant_dict[tenant_uuid][scope]
+
+    def get_tenant_uuid(self, extension_uuid):
+        # Reverse lookup: tenant owning this extension (authz or admin side).
+        for _tenant_uuid, _tenant_value in six.iteritems(self.get_tenant_dict()):
+            if extension_uuid == _tenant_value["authz"] or extension_uuid == _tenant_value["admin"]:
+                return _tenant_uuid
+        raise TenantNotFoundError()
+
+    def get_admin_extension_uuid(self, authz_extension_uuid):
+        # Map an authz extension to its paired admin extension.
+        # NOTE(review): the missing space in "['authz']and" below is valid
+        # (ugly) Python; on no match this logs an error and implicitly
+        # returns None — see the FIXME.
+        _tenants = self.get_tenant_dict()
+        for _tenant_uuid in _tenants:
+            if authz_extension_uuid == _tenants[_tenant_uuid]['authz']and _tenants[_tenant_uuid]['admin']:
+                return _tenants[_tenant_uuid]['admin']
+        self.moonlog_api.error(_("No IntraExtension found mapping this Authz IntraExtension: {}.".format(
+            authz_extension_uuid)))
+        # FIXME (dthom): if AdminIntraExtensionNotFound, maybe we can add an option in configuration file
+        # to allow or not the fact that Admin IntraExtension can be None
+        # raise AdminIntraExtensionNotFound()
+
+    def delete(self, authz_extension_uuid):
+        # Blank the mapping entry of the tenant owning this authz extension
+        # (name/authz/admin set to "") rather than removing the key.
+        _tenants = self.get_tenant_dict()
+        for _tenant_uuid in _tenants:
+            if authz_extension_uuid == _tenants[_tenant_uuid]['authz']:
+                return self.set_tenant_dict(_tenant_uuid, "", "", "")
+        raise AuthzIntraExtensionNotFound(_("No IntraExtension found mapping this Authz IntraExtension: {}.".format(
+            authz_extension_uuid)))
+
+    def set_tenant_dict(self, tenant_uuid, name, authz_extension_uuid, admin_extension_uuid):
+        # NOTE(review): builds a single-entry dict — whether the driver merges
+        # it into, or replaces, the stored mapping depends on the backend;
+        # confirm against the tenant driver before reuse.
+        tenant = {
+            tenant_uuid: {
+                "name": name,
+                "authz": authz_extension_uuid,
+                "admin": admin_extension_uuid
+            }
+        }
+        # TODO (dthom): Tenant must be checked against Keystone database.
+        return self.driver.set_tenant_dict(tenant)
+
+
+class TenantDriver:
+    """Interface that tenant mapping backends must implement."""
+
+    def get_tenant_dict(self):
+        # Return the full mapping {tenant_uuid: {name, authz, admin}}.
+        raise exception.NotImplemented() # pragma: no cover
+
+    def set_tenant_dict(self, tenant):
+        # Persist the given tenant mapping.
+        raise exception.NotImplemented() # pragma: no cover
+
+
+@dependency.requires('identity_api', 'moonlog_api', 'tenant_api', 'authz_api')
+class IntraExtensionManager(manager.Manager):
+
+    # Genre tag (e.g. "authz"/"admin") intended to be set by concrete
+    # subclasses; None on the base manager.
+    __genre__ = None
+
+    def __init__(self):
+        driver = CONF.moon.intraextension_driver
+        super(IntraExtensionManager, self).__init__(driver)
+
+    def authz(self, uuid, sub, obj, act):
+        """Check authorization for a particular action.
+
+        :param uuid: UUID of an IntraExtension
+        :param sub: subject of the request
+        :param obj: object of the request
+        :param act: action of the request
+        :return: True or False or raise an exception
+
+        Pipeline: verify sub/obj/act exist in the perimeter, verify each has
+        at least one category assignment, pick the first rule relation that
+        is also a meta-rule relation, then test each sub-rule scope-by-scope.
+        Every deny path logs a "KO" message via moonlog_api.
+        """
+        if not self.driver.get_intra_extension(uuid):
+            raise IntraExtensionNotFound()
+        # self.moonlog_api.authz("Unknown: Authorization framework disabled ({} {} {} {})".format(uuid, sub, obj, act))
+        # self.moonlog_api.warning("Unknown: Authorization framework disabled ({} {} {} {})".format(uuid, sub, obj, act))
+        # return True
+        # #TODO (dthom): must raise IntraExtensionNotAuthorized
+        # try:
+        #     _subject_category_dict = self.driver.get_subject_category_dict(extension_uuid)
+        #     _object_category_dict = self.driver.get_object_category_dict(extension_uuid)
+        #     _action_category_dict = self.driver.get_action_category_dict(extension_uuid)
+        #     _subject_category_value_dict = self.driver.get_subject_category_value_dict(extension_uuid, subject_name)
+        #     _object_category_value_dict = self.driver.get_object_category_value_dict(extension_uuid, object_name)
+        #     _action_category_value_dict = self.driver.get_action_category_value_dict(extension_uuid, action_name)
+        #     _meta_rule = self.driver.get_meta_rule(extension_uuid)
+        #     _rules = self.driver.get_rules(extension_uuid)
+        #     # TODO: algorithm to validate requests
+        #     return True
+        # except exception:  # TODO: exception.IntraExtension.NotAuthorized
+        #     pass
+        sub_meta_rule = self.driver.get_meta_rule(uuid)
+        subject_assignments = self.driver.get_subject_category_assignment_dict(uuid)
+        action_assignments = self.driver.get_action_category_assignment_dict(uuid)
+        object_assignments = self.driver.get_object_category_assignment_dict(uuid)
+        # check if subject exists
+        if sub not in self.driver.get_subject_dict(uuid):
+            self.moonlog_api.authz("KO: Subject {} unknown".format(sub))
+            return False
+        # check if object exists
+        if obj not in self.driver.get_object_dict(uuid):
+            self.moonlog_api.authz("KO: Object {} unknown".format(obj))
+            return False
+        # check if action exists
+        if act not in self.driver.get_action_dict(uuid):
+            self.moonlog_api.authz("KO: Action {} unknown".format(act))
+            return False
+        # check if subject is in subject_assignment
+        for cat in subject_assignments.keys():
+            if sub in subject_assignments[cat]:
+                break
+        else:
+            self.moonlog_api.authz("KO: Subject no found in categories {}".format(
+                subject_assignments.keys()))
+            return False
+        # check if object is in object_assignment
+        for cat in object_assignments.keys():
+            if obj in object_assignments[cat]:
+                break
+        else:
+            self.moonlog_api.authz("KO: Object no found in categories {}".format(
+                object_assignments))
+            return False
+        # check if action is in action_assignment
+        for cat in action_assignments.keys():
+            if act in action_assignments[cat]:
+                break
+        else:
+            self.moonlog_api.authz("KO: Action no found in categories {}".format(
+                action_assignments.keys()))
+            return False
+        # get all rules for intra_extension
+        rules = self.driver.get_rules(uuid)
+        # check if relation exists in rules
+        relation_to_check = None
+        relations = self.driver.get_sub_meta_rule_relations(uuid)
+        for relation in rules:
+            if relation in relations:
+                # hypothesis: only one relation to check
+                relation_to_check = relation
+                break
+        else:
+            self.moonlog_api.authz("KO: No relation can be used {}".format(rules.keys()))
+            return False
+        # NOTE(review): sub_rule.pop(0) consumes the rule list in place, and
+        # the early "break" on first match can leave later category loops
+        # reading misaligned scopes when a category list has more than one
+        # entry — verify against the rule encoding before trusting this for
+        # multi-valued rules.
+        for sub_rule in rules[relation_to_check]:
+            for cat in sub_meta_rule[relation_to_check]["subject_categories"]:
+                rule_scope = sub_rule.pop(0)
+                if rule_scope in subject_assignments[cat][sub]:
+                    break
+            else:
+                continue
+            for cat in sub_meta_rule[relation_to_check]["action_categories"]:
+                rule_scope = sub_rule.pop(0)
+                if rule_scope in action_assignments[cat][act]:
+                    break
+            else:
+                continue
+            for cat in sub_meta_rule[relation_to_check]["object_categories"]:
+                rule_scope = sub_rule.pop(0)
+                if rule_scope in object_assignments[cat][obj]:
+                    break
+            else:
+                continue
+            self.moonlog_api.authz("OK ({} {},{},{})".format(uuid, sub, act, obj))
+            return True
+        self.moonlog_api.authz("KO ({} {},{},{})".format(uuid, sub, act, obj))
+        return False
+
+ def __get_key_from_value(self, value, values_dict):
+ return filter(lambda v: v[1] == value, values_dict.iteritems())[0][0]
+
+ def get_intra_extension_list(self):
+ # TODO: check will be done through super_extension later
+ return self.driver.get_intra_extension_list()
+
+ def get_intra_extension_id_for_tenant(self, tenant_id):
+ for intra_extension_id in self.driver.get_intra_extension_list():
+ if self.driver.get_intra_extension(intra_extension_id)["tenant"] == tenant_id:
+ return intra_extension_id
+ LOG.error("IntraExtension not found for tenant {}".format(tenant_id))
+ raise exception.NotFound
+
+ def get_intra_extension(self, uuid):
+ return self.driver.get_intra_extension(uuid)
+
+ def set_perimeter_values(self, ie, policy_dir):
+
+ perimeter_path = os.path.join(policy_dir, 'perimeter.json')
+ f = open(perimeter_path)
+ json_perimeter = json.load(f)
+
+ subject_dict = dict()
+ # We suppose that all subjects can be mapped to a true user in Keystone
+ for _subject in json_perimeter['subjects']:
+ user = self.identity_api.get_user_by_name(_subject, "default")
+ subject_dict[user["id"]] = user["name"]
+ self.driver.set_subject_dict(ie["id"], subject_dict)
+ ie["subjects"] = subject_dict
+
+ # Copy all values for objects and subjects
+ object_dict = dict()
+ for _object in json_perimeter['objects']:
+ object_dict[uuid4().hex] = _object
+ self.driver.set_object_dict(ie["id"], object_dict)
+ ie["objects"] = object_dict
+
+ action_dict = dict()
+ for _action in json_perimeter['actions']:
+ action_dict[uuid4().hex] = _action
+ self.driver.set_action_dict(ie["id"], action_dict)
+ ie["ations"] = action_dict
+
+ def set_metadata_values(self, ie, policy_dir):
+
+ metadata_path = os.path.join(policy_dir, 'metadata.json')
+ f = open(metadata_path)
+ json_perimeter = json.load(f)
+
+ subject_categories_dict = dict()
+ for _cat in json_perimeter['subject_categories']:
+ subject_categories_dict[uuid4().hex] = _cat
+ self.driver.set_subject_category_dict(ie["id"], subject_categories_dict)
+ # Initialize scope categories
+ for _cat in subject_categories_dict.keys():
+ self.driver.set_subject_category_scope_dict(ie["id"], _cat, {})
+ ie['subject_categories'] = subject_categories_dict
+
+ object_categories_dict = dict()
+ for _cat in json_perimeter['object_categories']:
+ object_categories_dict[uuid4().hex] = _cat
+ self.driver.set_object_category_dict(ie["id"], object_categories_dict)
+ # Initialize scope categories
+ for _cat in object_categories_dict.keys():
+ self.driver.set_object_category_scope_dict(ie["id"], _cat, {})
+ ie['object_categories'] = object_categories_dict
+
+ action_categories_dict = dict()
+ for _cat in json_perimeter['action_categories']:
+ action_categories_dict[uuid4().hex] = _cat
+ self.driver.set_action_category_dict(ie["id"], action_categories_dict)
+ # Initialize scope categories
+ for _cat in action_categories_dict.keys():
+ self.driver.set_action_category_scope_dict(ie["id"], _cat, {})
+ ie['action_categories'] = action_categories_dict
+
+ def set_scope_values(self, ie, policy_dir):
+
+ metadata_path = os.path.join(policy_dir, 'scope.json')
+ f = open(metadata_path)
+ json_perimeter = json.load(f)
+
+ ie['subject_category_scope'] = dict()
+ for category, scope in json_perimeter["subject_category_scope"].iteritems():
+ category = self.__get_key_from_value(
+ category,
+ self.driver.get_subject_category_dict(ie["id"])["subject_categories"])
+ _scope_dict = dict()
+ for _scope in scope:
+ _scope_dict[uuid4().hex] = _scope
+ self.driver.set_subject_category_scope_dict(ie["id"], category, _scope_dict)
+ ie['subject_category_scope'][category] = _scope_dict
+
+ ie['object_category_scope'] = dict()
+ for category, scope in json_perimeter["object_category_scope"].iteritems():
+ category = self.__get_key_from_value(
+ category,
+ self.driver.get_object_category_dict(ie["id"])["object_categories"])
+ _scope_dict = dict()
+ for _scope in scope:
+ _scope_dict[uuid4().hex] = _scope
+ self.driver.set_object_category_scope_dict(ie["id"], category, _scope_dict)
+ ie['object_category_scope'][category] = _scope_dict
+
+ ie['action_category_scope'] = dict()
+ for category, scope in json_perimeter["action_category_scope"].iteritems():
+ category = self.__get_key_from_value(
+ category,
+ self.driver.get_action_category_dict(ie["id"])["action_categories"])
+ _scope_dict = dict()
+ for _scope in scope:
+ _scope_dict[uuid4().hex] = _scope
+ self.driver.set_action_category_scope_dict(ie["id"], category, _scope_dict)
+ ie['action_category_scope'][category] = _scope_dict
+
+ def set_assignments_values(self, ie, policy_dir):
+
+ f = open(os.path.join(policy_dir, 'assignment.json'))
+ json_assignments = json.load(f)
+
+ subject_assignments = dict()
+ for category, value in json_assignments['subject_assignments'].iteritems():
+ category = self.__get_key_from_value(
+ category,
+ self.driver.get_subject_category_dict(ie["id"])["subject_categories"])
+ for user in value:
+ if user not in subject_assignments:
+ subject_assignments[user] = dict()
+ subject_assignments[user][category] = \
+ map(lambda x: self.__get_key_from_value(x, ie['subject_category_scope'][category]), value[user])
+ else:
+ subject_assignments[user][category].extend(
+ map(lambda x: self.__get_key_from_value(x, ie['subject_category_scope'][category]), value[user])
+ )
+ # Note (dthom): subject_category_assignment must be initialized because when there is no data in json
+ # we will not go through the for loop
+ self.driver.set_subject_category_assignment_dict(ie["id"])
+ for subject in subject_assignments:
+ self.driver.set_subject_category_assignment_dict(ie["id"], subject, subject_assignments[subject])
+
+ object_assignments = dict()
+ for category, value in json_assignments["object_assignments"].iteritems():
+ category = self.__get_key_from_value(
+ category,
+ self.driver.get_object_category_dict(ie["id"])["object_categories"])
+ for object_name in value:
+ if object_name not in object_assignments:
+ object_assignments[object_name] = dict()
+ object_assignments[object_name][category] = \
+ map(lambda x: self.__get_key_from_value(x, ie['object_category_scope'][category]),
+ value[object_name])
+ else:
+ object_assignments[object_name][category].extend(
+ map(lambda x: self.__get_key_from_value(x, ie['object_category_scope'][category]),
+ value[object_name])
+ )
+ # Note (dthom): object_category_assignment must be initialized because when there is no data in json
+ # we will not go through the for loop
+ self.driver.set_object_category_assignment_dict(ie["id"])
+ for object in object_assignments:
+ self.driver.set_object_category_assignment_dict(ie["id"], object, object_assignments[object])
+
+ action_assignments = dict()
+ for category, value in json_assignments["action_assignments"].iteritems():
+ category = self.__get_key_from_value(
+ category,
+ self.driver.get_action_category_dict(ie["id"])["action_categories"])
+ for action_name in value:
+ if action_name not in action_assignments:
+ action_assignments[action_name] = dict()
+ action_assignments[action_name][category] = \
+ map(lambda x: self.__get_key_from_value(x, ie['action_category_scope'][category]),
+ value[action_name])
+ else:
+ action_assignments[action_name][category].extend(
+ map(lambda x: self.__get_key_from_value(x, ie['action_category_scope'][category]),
+ value[action_name])
+ )
+ # Note (dthom): action_category_assignment must be initialized because when there is no data in json
+ # we will not go through the for loop
+ self.driver.set_action_category_assignment_dict(ie["id"])
+ for action in action_assignments:
+ self.driver.set_action_category_assignment_dict(ie["id"], action, action_assignments[action])
+
+ def set_metarule_values(self, ie, policy_dir):
+
+ metadata_path = os.path.join(policy_dir, 'metarule.json')
+ f = open(metadata_path)
+ json_metarule = json.load(f)
+ # ie["meta_rules"] = copy.deepcopy(json_metarule)
+ metarule = dict()
+ categories = {
+ "subject_categories": self.driver.get_subject_category_dict(ie["id"]),
+ "object_categories": self.driver.get_object_category_dict(ie["id"]),
+ "action_categories": self.driver.get_action_category_dict(ie["id"])
+ }
+ # Translate value from JSON file to UUID for Database
+ for relation in json_metarule["sub_meta_rules"]:
+ metarule[relation] = dict()
+ for item in ("subject_categories", "object_categories", "action_categories"):
+ metarule[relation][item] = list()
+ for element in json_metarule["sub_meta_rules"][relation][item]:
+ metarule[relation][item].append(self.__get_key_from_value(
+ element,
+ categories[item][item]
+ ))
+ submetarules = {
+ "aggregation": json_metarule["aggregation"],
+ "sub_meta_rules": metarule
+ }
+ self.driver.set_meta_rule_dict(ie["id"], submetarules)
+
+ def set_subrules_values(self, ie, policy_dir):
+
+ metadata_path = os.path.join(policy_dir, 'rules.json')
+ f = open(metadata_path)
+ json_rules = json.load(f)
+ ie["sub_rules"] = {"rules": copy.deepcopy(json_rules)}
+ # Translate value from JSON file to UUID for Database
+ rules = dict()
+ sub_meta_rules = self.driver.get_meta_rule_dict(ie["id"])
+ for relation in json_rules:
+ if relation not in self.get_sub_meta_rule_relations("admin", ie["id"])["sub_meta_rule_relations"]:
+ raise IntraExtensionError("Bad relation name {} in rules".format(relation))
+ rules[relation] = list()
+ for rule in json_rules[relation]:
+ subrule = list()
+ for cat, cat_func in (
+ ("subject_categories", self.driver.get_subject_category_scope_dict),
+ ("action_categories", self.driver.get_action_category_scope_dict),
+ ("object_categories", self.driver.get_object_category_scope_dict),
+ ):
+ for cat_value in sub_meta_rules["sub_meta_rules"][relation][cat]:
+ scope = cat_func(
+ ie["id"],
+ cat_value
+ )[cat_func.__name__.replace("get_", "").replace("_dict", "")]
+
+ _ = rule.pop(0)
+ a_scope = self.__get_key_from_value(_, scope[cat_value])
+ subrule.append(a_scope)
+ # if a positive/negative value exists, all titem of rule have not be consumed
+ if len(rule) >= 1 and type(rule[0]) is bool:
+ subrule.append(rule[0])
+ else:
+ # if value doesn't exist add a default value
+ subrule.append(True)
+ rules[relation].append(subrule)
+ self.driver.set_rules(ie["id"], rules)
+
+ def load_intra_extension(self, intra_extension):
+ ie = dict()
+ # TODO: clean some values
+ ie['id'] = uuid4().hex
+ ie["name"] = filter_input(intra_extension["name"])
+ ie["model"] = filter_input(intra_extension["policymodel"])
+ ie["description"] = filter_input(intra_extension["description"])
+ ref = self.driver.set_intra_extension(ie['id'], ie)
+ self.moonlog_api.debug("Creation of IE: {}".format(ref))
+ # read the profile given by "policymodel" and populate default variables
+ policy_dir = os.path.join(CONF.moon.policy_directory, ie["model"])
+ self.set_perimeter_values(ie, policy_dir)
+ self.set_metadata_values(ie, policy_dir)
+ self.set_scope_values(ie, policy_dir)
+ self.set_assignments_values(ie, policy_dir)
+ self.set_metarule_values(ie, policy_dir)
+ self.set_subrules_values(ie, policy_dir)
+ return ref
+
+ def delete_intra_extension(self, intra_extension_id):
+ ref = self.driver.delete_intra_extension(intra_extension_id)
+ return ref
+
+ # Perimeter functions
+
+ @filter_args
+ @enforce("read", "subjects")
+ def get_subject_dict(self, user_name, intra_extension_uuid):
+ return self.driver.get_subject_dict(intra_extension_uuid)
+
+ @filter_args
+ @enforce(("read", "write"), "subjects")
+ def set_subject_dict(self, user_name, intra_extension_uuid, subject_dict):
+ for uuid in subject_dict:
+ # Next line will raise an error if user is not present in Keystone database
+ self.identity_api.get_user(uuid)
+ return self.driver.set_subject_dict(intra_extension_uuid, subject_dict)
+
+ @filter_args
+ @enforce(("read", "write"), "subjects")
+ def add_subject_dict(self, user_name, intra_extension_uuid, subject_uuid):
+ # Next line will raise an error if user is not present in Keystone database
+ user = self.identity_api.get_user(subject_uuid)
+ return self.driver.add_subject(intra_extension_uuid, subject_uuid, user["name"])
+
+ @filter_args
+ @enforce("write", "subjects")
+ def del_subject(self, user_name, intra_extension_uuid, subject_uuid):
+ self.driver.remove_subject(intra_extension_uuid, subject_uuid)
+
+ @filter_args
+ @enforce("read", "objects")
+ def get_object_dict(self, user_name, intra_extension_uuid):
+ return self.driver.get_object_dict(intra_extension_uuid)
+
+ @filter_args
+ @enforce(("read", "write"), "objects")
+ def set_object_dict(self, user_name, intra_extension_uuid, object_dict):
+ return self.driver.set_object_dict(intra_extension_uuid, object_dict)
+
+ @filter_args
+ @enforce(("read", "write"), "objects")
+ def add_object_dict(self, user_name, intra_extension_uuid, object_name):
+ object_uuid = uuid4().hex
+ return self.driver.add_object(intra_extension_uuid, object_uuid, object_name)
+
+ @filter_args
+ @enforce("write", "objects")
+ def del_object(self, user_name, intra_extension_uuid, object_uuid):
+ self.driver.remove_object(intra_extension_uuid, object_uuid)
+
+ @filter_args
+ @enforce("read", "actions")
+ def get_action_dict(self, user_name, intra_extension_uuid):
+ return self.driver.get_action_dict(intra_extension_uuid)
+
+ @filter_args
+ @enforce(("read", "write"), "actions")
+ def set_action_dict(self, user_name, intra_extension_uuid, action_dict):
+ return self.driver.set_action_dict(intra_extension_uuid, action_dict)
+
+ @filter_args
+ @enforce(("read", "write"), "actions")
+ def add_action_dict(self, user_name, intra_extension_uuid, action_name):
+ action_uuid = uuid4().hex
+ return self.driver.add_action(intra_extension_uuid, action_uuid, action_name)
+
+ @filter_args
+ @enforce("write", "actions")
+ def del_action(self, user_name, intra_extension_uuid, action_uuid):
+ self.driver.remove_action(intra_extension_uuid, action_uuid)
+
+ # Metadata functions
+
+ @filter_args
+ @enforce("read", "subject_categories")
+ def get_subject_category_dict(self, user_name, intra_extension_uuid):
+ return self.driver.get_subject_category_dict(intra_extension_uuid)
+
+ @filter_args
+ @enforce("read", "subject_categories")
+ @enforce("read", "subject_category_scope")
+ @enforce("write", "subject_category_scope")
+ def set_subject_category_dict(self, user_name, intra_extension_uuid, subject_category):
+ subject_category_dict = self.driver.set_subject_category_dict(intra_extension_uuid, subject_category)
+ # if we add a new category, we must add it to the subject_category_scope
+ for _cat in subject_category.keys():
+ try:
+ _ = self.driver.get_subject_category_scope_dict(intra_extension_uuid, _cat)
+ except CategoryNotFound:
+ self.driver.set_subject_category_scope_dict(intra_extension_uuid, _cat, {})
+ return subject_category_dict
+
+ @filter_args
+ @enforce("read", "subject_categories")
+ @enforce("write", "subject_categories")
+ def add_subject_category_dict(self, user_name, intra_extension_uuid, subject_category_name):
+ subject_category_uuid = uuid4().hex
+ return self.driver.add_subject_category_dict(intra_extension_uuid, subject_category_uuid, subject_category_name)
+
+ @filter_args
+ @enforce("write", "subject_categories")
+ def del_subject_category(self, user_name, intra_extension_uuid, subject_uuid):
+ return self.driver.remove_subject_category(intra_extension_uuid, subject_uuid)
+
+ @filter_args
+ @enforce("read", "object_categories")
+ def get_object_category_dict(self, user_name, intra_extension_uuid):
+ return self.driver.get_object_category_dict(intra_extension_uuid)
+
+ @filter_args
+ @enforce("read", "object_categories")
+ @enforce("read", "object_category_scope")
+ @enforce("write", "object_category_scope")
+ def set_object_category_dict(self, user_name, intra_extension_uuid, object_category):
+ object_category_dict = self.driver.set_object_category_dict(intra_extension_uuid, object_category)
+ # if we add a new category, we must add it to the object_category_scope
+ for _cat in object_category.keys():
+ try:
+ _ = self.driver.get_object_category_scope_dict(intra_extension_uuid, _cat)
+ except CategoryNotFound:
+ self.driver.set_object_category_scope_dict(intra_extension_uuid, _cat, {})
+ return object_category_dict
+
+ @filter_args
+ @enforce("read", "object_categories")
+ @enforce("write", "object_categories")
+ def add_object_category_dict(self, user_name, intra_extension_uuid, object_category_name):
+ object_category_uuid = uuid4().hex
+ return self.driver.add_object_category_dict(intra_extension_uuid, object_category_uuid, object_category_name)
+
+ @filter_args
+ @enforce("write", "object_categories")
+ def del_object_category(self, user_name, intra_extension_uuid, object_uuid):
+ return self.driver.remove_object_category(intra_extension_uuid, object_uuid)
+
+ @filter_args
+ @enforce("read", "action_categories")
+ def get_action_category_dict(self, user_name, intra_extension_uuid):
+ return self.driver.get_action_category_dict(intra_extension_uuid)
+
+ @filter_args
+ @enforce("read", "action_categories")
+ @enforce("read", "action_category_scope")
+ @enforce("write", "action_category_scope")
+ def set_action_category_dict(self, user_name, intra_extension_uuid, action_category):
+ action_category_dict = self.driver.set_action_category_dict(intra_extension_uuid, action_category)
+ # if we add a new category, we must add it to the action_category_scope
+ for _cat in action_category.keys():
+ try:
+ _ = self.driver.get_action_category_scope_dict(intra_extension_uuid, _cat)
+ except CategoryNotFound:
+ self.driver.set_action_category_scope_dict(intra_extension_uuid, _cat, {})
+ return action_category_dict
+
+ @filter_args
+ @enforce("read", "action_categories")
+ @enforce("write", "action_categories")
+ def add_action_category_dict(self, user_name, intra_extension_uuid, action_category_name):
+ action_category_uuid = uuid4().hex
+ return self.driver.add_action_category_dict(intra_extension_uuid, action_category_uuid, action_category_name)
+
+ @filter_args
+ @enforce("write", "action_categories")
+ def del_action_category(self, user_name, intra_extension_uuid, action_uuid):
+ return self.driver.remove_action_category(intra_extension_uuid, action_uuid)
+
+ # Scope functions
+ @filter_args
+ @enforce("read", "subject_category_scope")
+ @enforce("read", "subject_category")
+ def get_subject_category_scope_dict(self, user_name, intra_extension_uuid, category):
+ if category not in self.get_subject_category_dict(user_name, intra_extension_uuid)["subject_categories"]:
+ raise IntraExtensionError("Subject category {} is unknown.".format(category))
+ return self.driver.get_subject_category_scope_dict(intra_extension_uuid, category)
+
+ @filter_args
+ @enforce("read", "subject_category_scope")
+ @enforce("read", "subject_category")
+ def set_subject_category_scope_dict(self, user_name, intra_extension_uuid, category, scope):
+ if category not in self.get_subject_category_dict(user_name, intra_extension_uuid)["subject_categories"]:
+ raise IntraExtensionError("Subject category {} is unknown.".format(category))
+ return self.driver.set_subject_category_scope_dict(intra_extension_uuid, category, scope)
+
+ @filter_args
+ @enforce(("read", "write"), "subject_category_scope")
+ @enforce("read", "subject_category")
+ def add_subject_category_scope_dict(self, user_name, intra_extension_uuid, subject_category, scope_name):
+ subject_categories = self.get_subject_category_dict(user_name, intra_extension_uuid)
+ # check if subject_category exists in database
+ if subject_category not in subject_categories["subject_categories"]:
+ raise IntraExtensionError("Subject category {} is unknown.".format(subject_category))
+ scope_uuid = uuid4().hex
+ return self.driver.add_subject_category_scope_dict(
+ intra_extension_uuid,
+ subject_category,
+ scope_uuid,
+ scope_name)
+
+ @filter_args
+ @enforce("write", "subject_category_scope")
+ @enforce("read", "subject_category")
+ def del_subject_category_scope(self, user_name, intra_extension_uuid, subject_category, subject_category_scope):
+ subject_categories = self.get_subject_category_dict(user_name, intra_extension_uuid)
+ # check if subject_category exists in database
+ if subject_category not in subject_categories["subject_categories"]:
+ raise IntraExtensionError("Subject category {} is unknown.".format(subject_category))
+ return self.driver.remove_subject_category_scope_dict(
+ intra_extension_uuid,
+ subject_category,
+ subject_category_scope)
+
+ @filter_args
+ @enforce("read", "object_category_scope")
+ @enforce("read", "object_category")
+ def get_object_category_scope_dict(self, user_name, intra_extension_uuid, category):
+ if category not in self.get_object_category_dict(user_name, intra_extension_uuid)["object_categories"]:
+ raise IntraExtensionError("Object category {} is unknown.".format(category))
+ return self.driver.get_object_category_scope_dict(intra_extension_uuid, category)
+
+ @filter_args
+ @enforce("read", "object_category_scope")
+ @enforce("read", "object_category")
+ def set_object_category_scope_dict(self, user_name, intra_extension_uuid, category, scope):
+ if category not in self.get_object_category_dict(user_name, intra_extension_uuid)["object_categories"]:
+ raise IntraExtensionError("Object category {} is unknown.".format(category))
+ return self.driver.set_object_category_scope_dict(intra_extension_uuid, category, scope)
+
+ @filter_args
+ @enforce(("read", "write"), "object_category_scope")
+ @enforce("read", "object_category")
+ def add_object_category_scope_dict(self, user_name, intra_extension_uuid, object_category, scope_name):
+ object_categories = self.get_object_category_dict(user_name, intra_extension_uuid)
+ # check if object_category exists in database
+ if object_category not in object_categories["object_categories"]:
+ raise IntraExtensionError("Object category {} is unknown.".format(object_category))
+ scope_uuid = uuid4().hex
+ return self.driver.add_object_category_scope_dict(
+ intra_extension_uuid,
+ object_category,
+ scope_uuid,
+ scope_name)
+
+ @filter_args
+ @enforce("write", "object_category_scope")
+ @enforce("read", "object_category")
+ def del_object_category_scope(self, user_name, intra_extension_uuid, object_category, object_category_scope):
+ object_categories = self.get_object_category_dict(user_name, intra_extension_uuid)
+ # check if object_category exists in database
+ if object_category not in object_categories["object_categories"]:
+ raise IntraExtensionError("Object category {} is unknown.".format(object_category))
+ return self.driver.remove_object_category_scope_dict(
+ intra_extension_uuid,
+ object_category,
+ object_category_scope)
+
+ @filter_args
+ @enforce("read", "action_category_scope")
+ @enforce("read", "action_category")
+ def get_action_category_scope_dict(self, user_name, intra_extension_uuid, category):
+ if category not in self.get_action_category_dict(user_name, intra_extension_uuid)["action_categories"]:
+ raise IntraExtensionError("Action category {} is unknown.".format(category))
+ return self.driver.get_action_category_scope_dict(intra_extension_uuid, category)
+
+ @filter_args
+ @enforce(("read", "write"), "action_category_scope")
+ @enforce("read", "action_category")
+ def set_action_category_scope_dict(self, user_name, intra_extension_uuid, category, scope):
+ if category not in self.get_action_category_dict(user_name, intra_extension_uuid)["action_categories"]:
+ raise IntraExtensionError("Action category {} is unknown.".format(category))
+ return self.driver.set_action_category_scope_dict(intra_extension_uuid, category, scope)
+
+ @filter_args
+ @enforce(("read", "write"), "action_category_scope")
+ @enforce("read", "action_category")
+ def add_action_category_scope_dict(self, user_name, intra_extension_uuid, action_category, scope_name):
+ action_categories = self.get_action_category_dict(user_name, intra_extension_uuid)
+ # check if action_category exists in database
+ if action_category not in action_categories["action_categories"]:
+ raise IntraExtensionError("Action category {} is unknown.".format(action_category))
+ scope_uuid = uuid4().hex
+ return self.driver.add_action_category_scope_dict(
+ intra_extension_uuid,
+ action_category,
+ scope_uuid,
+ scope_name)
+
+ @filter_args
+ @enforce("write", "action_category_scope")
+ @enforce("read", "action_category")
+ def del_action_category_scope(self, user_name, intra_extension_uuid, action_category, action_category_scope):
+ action_categories = self.get_action_category_dict(user_name, intra_extension_uuid)
+ # check if action_category exists in database
+ if action_category not in action_categories["action_categories"]:
+ raise IntraExtensionError("Action category {} is unknown.".format(action_category))
+ return self.driver.remove_action_category_scope_dict(
+ intra_extension_uuid,
+ action_category,
+ action_category_scope)
+
+ # Assignment functions
+
+ @filter_args
+ @enforce("read", "subject_category_assignment")
+ @enforce("read", "subjects")
+ def get_subject_category_assignment_dict(self, user_name, intra_extension_uuid, subject_uuid):
+ # check if subject exists in database
+ if subject_uuid not in self.get_subject_dict(user_name, intra_extension_uuid)["subjects"]:
+ LOG.error("add_subject_assignment: unknown subject_id {}".format(subject_uuid))
+ raise IntraExtensionError("Bad input data")
+ return self.driver.get_subject_category_assignment_dict(intra_extension_uuid, subject_uuid)
+
+ @filter_args
+ @enforce("read", "subject_category_assignment")
+ @enforce("write", "subject_category_assignment")
+ @enforce("read", "subjects")
+ def set_subject_category_assignment_dict(self, user_name, intra_extension_uuid, subject_uuid, assignment_dict):
+ # check if subject exists in database
+ if subject_uuid not in self.get_subject_dict(user_name, intra_extension_uuid)["subjects"]:
+ LOG.error("add_subject_assignment: unknown subject_id {}".format(subject_uuid))
+ raise IntraExtensionError("Bad input data")
+ return self.driver.set_subject_category_assignment_dict(intra_extension_uuid, subject_uuid, assignment_dict)
+
+ @filter_args
+ @enforce("read", "subject_category_assignment")
+ @enforce("write", "subject_category_assignment")
+ @enforce("read", "subjects")
+ @enforce("read", "subject_category")
+ def del_subject_category_assignment(self, user_name, intra_extension_uuid, subject_uuid, category_uuid, scope_uuid):
+ # check if category exists in database
+ if category_uuid not in self.get_subject_category_dict(user_name, intra_extension_uuid)["subject_categories"]:
+ LOG.error("add_subject_category_scope: unknown subject_category {}".format(category_uuid))
+ raise IntraExtensionError("Bad input data")
+ # check if subject exists in database
+ if subject_uuid not in self.get_subject_dict(user_name, intra_extension_uuid)["subjects"]:
+ LOG.error("add_subject_assignment: unknown subject_id {}".format(subject_uuid))
+ raise IntraExtensionError("Bad input data")
+ self.driver.remove_subject_category_assignment(intra_extension_uuid, subject_uuid, category_uuid, scope_uuid)
+
+ @filter_args
+ @enforce("write", "subject_category_assignment")
+ @enforce("read", "subjects")
+ @enforce("read", "subject_category")
+ def add_subject_category_assignment_dict(self, user_name, intra_extension_uuid, subject_uuid, category_uuid, scope_uuid):
+ # check if category exists in database
+ if category_uuid not in self.get_subject_category_dict(user_name, intra_extension_uuid)["subject_categories"]:
+ LOG.error("add_subject_category_scope: unknown subject_category {}".format(category_uuid))
+ raise IntraExtensionError("Bad input data")
+ # check if subject exists in database
+ if subject_uuid not in self.get_subject_dict(user_name, intra_extension_uuid)["subjects"]:
+ LOG.error("add_subject_assignment: unknown subject_id {}".format(subject_uuid))
+ raise IntraExtensionError("Bad input data")
+ return self.driver.add_subject_category_assignment_dict(intra_extension_uuid, subject_uuid, category_uuid, scope_uuid)
+
+ @filter_args
+ @enforce("read", "object_category_assignment")
+ @enforce("read", "objects")
+ def get_object_category_assignment_dict(self, user_name, intra_extension_uuid, object_uuid):
+ # check if object exists in database
+ if object_uuid not in self.get_object_dict(user_name, intra_extension_uuid)["objects"]:
+ LOG.error("add_object_assignment: unknown object_id {}".format(object_uuid))
+ raise IntraExtensionError("Bad input data")
+ return self.driver.get_object_category_assignment_dict(intra_extension_uuid, object_uuid)
+
+ @filter_args
+ @enforce("read", "object_category_assignment")
+ @enforce("write", "object_category_assignment")
+ @enforce("read", "objects")
+ def set_object_category_assignment_dict(self, user_name, intra_extension_uuid, object_uuid, assignment_dict):
+ # check if object exists in database
+ if object_uuid not in self.get_object_dict(user_name, intra_extension_uuid)["objects"]:
+ LOG.error("add_object_assignment: unknown object_id {}".format(object_uuid))
+ raise IntraExtensionError("Bad input data")
+ return self.driver.set_object_category_assignment_dict(intra_extension_uuid, object_uuid, assignment_dict)
+
+ @filter_args
+ @enforce("read", "object_category_assignment")
+ @enforce("write", "object_category_assignment")
+ @enforce("read", "objects")
+ @enforce("read", "object_category")
+ def del_object_category_assignment(self, user_name, intra_extension_uuid, object_uuid, category_uuid, scope_uuid):
+ # check if category exists in database
+ if category_uuid not in self.get_object_category_dict(user_name, intra_extension_uuid)["object_categories"]:
+ LOG.error("add_object_category_scope: unknown object_category {}".format(category_uuid))
+ raise IntraExtensionError("Bad input data")
+ # check if object exists in database
+ if object_uuid not in self.get_object_dict(user_name, intra_extension_uuid)["objects"]:
+ LOG.error("add_object_assignment: unknown object_id {}".format(object_uuid))
+ raise IntraExtensionError("Bad input data")
+ self.driver.remove_object_category_assignment(intra_extension_uuid, object_uuid, category_uuid, scope_uuid)
+
+ @filter_args
+ @enforce("write", "object_category_assignment")
+ @enforce("read", "objects")
+ @enforce("read", "object_category")
+ def add_object_category_assignment_dict(self, user_name, intra_extension_uuid, object_uuid, category_uuid, scope_uuid):
+ # check if category exists in database
+ if category_uuid not in self.get_object_category_dict(user_name, intra_extension_uuid)["object_categories"]:
+ LOG.error("add_object_category_scope: unknown object_category {}".format(category_uuid))
+ raise IntraExtensionError("Bad input data")
+ # check if object exists in database
+ if object_uuid not in self.get_object_dict(user_name, intra_extension_uuid)["objects"]:
+ LOG.error("add_object_assignment: unknown object_id {}".format(object_uuid))
+ raise IntraExtensionError("Bad input data")
+ return self.driver.add_object_category_assignment_dict(intra_extension_uuid, object_uuid, category_uuid, scope_uuid)
+
+ @filter_args
+ @enforce("read", "action_category_assignment")
+ @enforce("read", "actions")
+ def get_action_category_assignment_dict(self, user_name, intra_extension_uuid, action_uuid):
+ # check if action exists in database
+ if action_uuid not in self.get_action_dict(user_name, intra_extension_uuid)["actions"]:
+ LOG.error("add_action_assignment: unknown action_id {}".format(action_uuid))
+ raise IntraExtensionError("Bad input data")
+ return self.driver.get_action_category_assignment_dict(intra_extension_uuid, action_uuid)
+
+ @filter_args
+ @enforce("read", "action_category_assignment")
+ @enforce("write", "action_category_assignment")
+ @enforce("read", "actions")
+ def set_action_category_assignment_dict(self, user_name, intra_extension_uuid, action_uuid, assignment_dict):
+ # check if action exists in database
+ if action_uuid not in self.get_action_dict(user_name, intra_extension_uuid)["actions"]:
+ LOG.error("add_action_assignment: unknown action_id {}".format(action_uuid))
+ raise IntraExtensionError("Bad input data")
+ return self.driver.set_action_category_assignment_dict(intra_extension_uuid, action_uuid, assignment_dict)
+
+ @filter_args
+ @enforce("read", "action_category_assignment")
+ @enforce("write", "action_category_assignment")
+ @enforce("read", "actions")
+ @enforce("read", "action_category")
+ def del_action_category_assignment(self, user_name, intra_extension_uuid, action_uuid, category_uuid, scope_uuid):
+ # check if category exists in database
+ if category_uuid not in self.get_action_category_dict(user_name, intra_extension_uuid)["action_categories"]:
+ LOG.error("add_action_category_scope: unknown action_category {}".format(category_uuid))
+ raise IntraExtensionError("Bad input data")
+ # check if action exists in database
+ if action_uuid not in self.get_action_dict(user_name, intra_extension_uuid)["actions"]:
+ LOG.error("add_action_assignment: unknown action_id {}".format(action_uuid))
+ raise IntraExtensionError("Bad input data")
+ self.driver.remove_action_category_assignment(intra_extension_uuid, action_uuid, category_uuid, scope_uuid)
+
+ @filter_args
+ @enforce("write", "action_category_assignment")
+ @enforce("read", "actions")
+ @enforce("read", "action_category")
+ def add_action_category_assignment_dict(self, user_name, intra_extension_uuid, action_uuid, category_uuid, scope_uuid):
+ # check if category exists in database
+ if category_uuid not in self.get_action_category_dict(user_name, intra_extension_uuid)["action_categories"]:
+ LOG.error("add_action_category_scope: unknown action_category {}".format(category_uuid))
+ raise IntraExtensionError("Bad input data")
+ # check if action exists in database
+ if action_uuid not in self.get_action_dict(user_name, intra_extension_uuid)["actions"]:
+ LOG.error("add_action_assignment: unknown action_id {}".format(action_uuid))
+ raise IntraExtensionError("Bad input data")
+ return self.driver.add_action_category_assignment_dict(
+ intra_extension_uuid,
+ action_uuid,
+ category_uuid,
+ scope_uuid
+ )
+
+ # Metarule functions
+ @filter_args
+ def get_aggregation_algorithms(self, user_name, intra_extension_uuid):
+ # TODO: check which algorithms are really usable
+ return {"aggregation_algorithms": ["and_true_aggregation", "test_aggregation"]}
+
+ @filter_args
+ @enforce("read", "aggregation_algorithms")
+ def get_aggregation_algorithm(self, user_name, intra_extension_uuid):
+ return self.driver.get_meta_rule_dict(intra_extension_uuid)
+
+ @filter_args
+ @enforce("read", "aggregation_algorithms")
+ @enforce("write", "aggregation_algorithms")
+ def set_aggregation_algorithm(self, user_name, intra_extension_uuid, aggregation_algorithm):
+ if aggregation_algorithm not in self.get_aggregation_algorithms(
+ user_name, intra_extension_uuid)["aggregation_algorithms"]:
+ raise IntraExtensionError("Unknown aggregation_algorithm: {}".format(aggregation_algorithm))
+ meta_rule = self.driver.get_meta_rule_dict(intra_extension_uuid)
+ meta_rule["aggregation"] = aggregation_algorithm
+ return self.driver.set_meta_rule_dict(intra_extension_uuid, meta_rule)
+
+ @filter_args
+ @enforce("read", "sub_meta_rule")
+ def get_sub_meta_rule(self, user_name, intra_extension_uuid):
+ return self.driver.get_meta_rule_dict(intra_extension_uuid)
+
+ @filter_args
+ @enforce("read", "sub_meta_rule")
+ @enforce("write", "sub_meta_rule")
+ def set_sub_meta_rule(self, user_name, intra_extension_uuid, sub_meta_rules):
+ # TODO (dthom): When sub_meta_rule is set, all rules must be dropped
+ # because the previous rules cannot be mapped to the new sub_meta_rule.
+ for relation in sub_meta_rules.keys():
+ if relation not in self.get_sub_meta_rule_relations(user_name, intra_extension_uuid)["sub_meta_rule_relations"]:
+ LOG.error("set_sub_meta_rule unknown MetaRule relation {}".format(relation))
+ raise IntraExtensionError("Bad input data.")
+ for cat in ("subject_categories", "object_categories", "action_categories"):
+ if cat not in sub_meta_rules[relation]:
+ LOG.error("set_sub_meta_rule category {} missed".format(cat))
+ raise IntraExtensionError("Bad input data.")
+ if type(sub_meta_rules[relation][cat]) is not list:
+ LOG.error("set_sub_meta_rule category {} is not a list".format(cat))
+ raise IntraExtensionError("Bad input data.")
+ subject_categories = self.get_subject_category_dict(user_name, intra_extension_uuid)
+ for data in sub_meta_rules[relation]["subject_categories"]:
+ if data not in subject_categories["subject_categories"]:
+ LOG.error("set_sub_meta_rule category {} is not part of subject_categories {}".format(
+ data, subject_categories))
+ raise IntraExtensionError("Bad input data.")
+ object_categories = self.get_object_category_dict(user_name, intra_extension_uuid)
+ for data in sub_meta_rules[relation]["object_categories"]:
+ if data not in object_categories["object_categories"]:
+ LOG.error("set_sub_meta_rule category {} is not part of object_categories {}".format(
+ data, object_categories))
+ raise IntraExtensionError("Bad input data.")
+ action_categories = self.get_action_category_dict(user_name, intra_extension_uuid)
+ for data in sub_meta_rules[relation]["action_categories"]:
+ if data not in action_categories["action_categories"]:
+ LOG.error("set_sub_meta_rule category {} is not part of action_categories {}".format(
+ data, action_categories))
+ raise IntraExtensionError("Bad input data.")
+ aggregation = self.driver.get_meta_rule_dict(intra_extension_uuid)["aggregation"]
+ return self.driver.set_meta_rule_dict(
+ intra_extension_uuid,
+ {
+ "aggregation": aggregation,
+ "sub_meta_rules": sub_meta_rules
+ })
+
+ # Sub-rules functions
+ @filter_args
+ @enforce("read", "sub_rules")
+ def get_sub_rules(self, user_name, intra_extension_uuid):
+ return self.driver.get_rules(intra_extension_uuid)
+
    @filter_args
    @enforce("read", "sub_rules")
    @enforce("write", "sub_rules")
    def set_sub_rule(self, user_name, intra_extension_uuid, relation, sub_rule):
        """Validate and append one sub-rule for a given meta-rule relation.

        :param user_name: name of the user performing the request (used by @enforce)
        :param intra_extension_uuid: IntraExtension UUID
        :param relation: name of the sub-meta-rule relation the rule instantiates
        :param sub_rule: ordered list of scope values (str/unicode/bool); must
            follow the relation's subject categories, then action categories,
            then object categories, plus one trailing element
        :return: the driver result after storing the updated rules
        :raises IntraExtensionError: on any malformed or unknown input
        """
        # Only plain strings and booleans may appear in a sub-rule.
        # NOTE: `unicode` implies this module targets Python 2.
        for item in sub_rule:
            if type(item) not in (str, unicode, bool):
                raise IntraExtensionError("Bad input data (sub_rule).")
        ref_rules = self.driver.get_rules(intra_extension_uuid)
        # Working copy consumed by pop(0) during the per-category validation
        # below, while the (later filtered) sub_rule list is what gets stored.
        _sub_rule = list(sub_rule)
        if relation not in self.get_sub_meta_rule_relations(user_name, intra_extension_uuid)["sub_meta_rule_relations"]:
            raise IntraExtensionError("Bad input data (rules).")
        # filter strings in sub_rule
        # NOTE(review): _sub_rule was copied BEFORE this filtering, so the
        # scope validation below checks the unfiltered values — confirm intended.
        sub_rule = [filter_input(x) for x in sub_rule]
        # check if length of sub_rule is correct from metadata_sub_rule
        # The trailing "+ 1" accounts for the rule's final decision element.
        metadata_sub_rule = self.get_sub_meta_rule(user_name, intra_extension_uuid)
        metadata_sub_rule_length = len(metadata_sub_rule['sub_meta_rules'][relation]["subject_categories"]) + \
            len(metadata_sub_rule['sub_meta_rules'][relation]["action_categories"]) + \
            len(metadata_sub_rule['sub_meta_rules'][relation]["object_categories"]) + 1
        if metadata_sub_rule_length != len(sub_rule):
            raise IntraExtensionError("Bad number of argument in sub_rule {}/{}".format(sub_rule,
                                                                                        metadata_sub_rule_length))
        # check if each item in sub_rule match a corresponding scope value
        # Categories are consumed in the same order the length was computed:
        # subjects, then actions, then objects.
        for category in metadata_sub_rule['sub_meta_rules'][relation]["subject_categories"]:
            item = _sub_rule.pop(0)
            if item not in self.get_subject_category_scope_dict(
                    user_name,
                    intra_extension_uuid, category)["subject_category_scope"][category].keys():
                raise IntraExtensionError("Bad subject value in sub_rule {}/{}".format(category, item))
        for category in metadata_sub_rule['sub_meta_rules'][relation]["action_categories"]:
            action_categories = self.get_action_category_scope_dict(
                user_name,
                intra_extension_uuid, category)["action_category_scope"][category]
            item = _sub_rule.pop(0)
            if item not in action_categories.keys():
                # Unlike the subject/object branches, this one also logs a warning.
                self.moonlog_api.warning("set_sub_rule bad action value in sub_rule {}/{}".format(category, item))
                raise IntraExtensionError("Bad input data.")
        for category in metadata_sub_rule['sub_meta_rules'][relation]["object_categories"]:
            item = _sub_rule.pop(0)
            if item not in self.get_object_category_scope_dict(
                    user_name,
                    intra_extension_uuid, category)["object_category_scope"][category].keys():
                raise IntraExtensionError("Bad object value in sub_rule {}/{}".format(category, item))
        # check if relation is already there
        if relation not in ref_rules["rules"]:
            ref_rules["rules"][relation] = list()
        # add sub_rule
        ref_rules["rules"][relation].append(sub_rule)
        return self.driver.set_rules(intra_extension_uuid, ref_rules["rules"])
+
+ @filter_args
+ @enforce("read", "sub_rules")
+ @enforce("write", "sub_rules")
+ def del_sub_rule(self, user_name, intra_extension_uuid, relation_name, rule):
+ ref_rules = self.driver.get_rules(intra_extension_uuid)
+ rule = rule.split("+")
+ for index, _item in enumerate(rule):
+ if "True" in _item:
+ rule[index] = True
+ if "False" in _item:
+ rule[index] = False
+ if relation_name in ref_rules["rules"]:
+ if rule in ref_rules["rules"][relation_name]:
+ ref_rules["rules"][relation_name].remove(rule)
+ else:
+ self.moonlog_api.error("Unknown rule: {}".format(rule))
+ else:
+ self.moonlog_api.error("Unknown relation name for rules: {}".format(relation_name))
+ return self.driver.set_rules(intra_extension_uuid, ref_rules["rules"])
+
+
@dependency.provider('authz_api')
@dependency.requires('identity_api', 'moonlog_api', 'tenant_api')
class IntraExtensionAuthzManager(IntraExtensionManager):
    """Read-only manager for "authz" IntraExtensions.

    An authorization extension may only be queried (see authz()); every
    mutator inherited from IntraExtensionManager is overridden to raise
    AuthIntraExtensionModificationNotAuthorized.
    """

    __genre__ = "authz"

    def authz(self, uuid, sub, obj, act):
        """Check authorization for a particular action.

        :param uuid: UUID of a tenant
        :param sub: subject of the request
        :param obj: object of the request
        :param act: action of the request
        :return: True or False or raise an exception
        """
        # Map the tenant UUID to its "authz" IntraExtension UUID before
        # delegating to the generic evaluation in the parent class.
        _uuid = self.tenant_api.get_extension_uuid(uuid, "authz")
        return super(IntraExtensionAuthzManager, self).authz(_uuid, sub, obj, act)

    # Every override below deliberately rejects modification: an authz
    # IntraExtension is a read-only policy enforcement point.

    def delete_intra_extension(self, intra_extension_id):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_subject_dict(self, user_name, intra_extension_uuid, subject_dict):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_subject_dict(self, user_name, intra_extension_uuid, subject_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_subject(self, user_name, intra_extension_uuid, subject_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_object_dict(self, user_name, intra_extension_uuid, object_dict):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_object_dict(self, user_name, intra_extension_uuid, object_name):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_object(self, user_name, intra_extension_uuid, object_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_action_dict(self, user_name, intra_extension_uuid, action_dict):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_action_dict(self, user_name, intra_extension_uuid, action_name):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_action(self, user_name, intra_extension_uuid, action_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_subject_category_dict(self, user_name, intra_extension_uuid, subject_category):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_subject_category_dict(self, user_name, intra_extension_uuid, subject_category_name):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_subject_category(self, user_name, intra_extension_uuid, subject_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_object_category_dict(self, user_name, intra_extension_uuid, object_category):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_object_category_dict(self, user_name, intra_extension_uuid, object_category_name):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_object_category(self, user_name, intra_extension_uuid, object_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_action_category_dict(self, user_name, intra_extension_uuid, action_category):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_action_category_dict(self, user_name, intra_extension_uuid, action_category_name):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_action_category(self, user_name, intra_extension_uuid, action_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_subject_category_scope_dict(self, user_name, intra_extension_uuid, category, scope):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_subject_category_scope_dict(self, user_name, intra_extension_uuid, subject_category, scope_name):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_subject_category_scope(self, user_name, intra_extension_uuid, subject_category, subject_category_scope):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_object_category_scope_dict(self, user_name, intra_extension_uuid, category, scope):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_object_category_scope_dict(self, user_name, intra_extension_uuid, object_category, scope_name):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_object_category_scope(self, user_name, intra_extension_uuid, object_category, object_category_scope):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_action_category_scope_dict(self, user_name, intra_extension_uuid, category, scope):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_action_category_scope_dict(self, user_name, intra_extension_uuid, action_category, scope_name):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_action_category_scope(self, user_name, intra_extension_uuid, action_category, action_category_scope):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_subject_category_assignment_dict(self, user_name, intra_extension_uuid, subject_uuid, assignment_dict):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_subject_category_assignment(self, user_name, intra_extension_uuid, subject_uuid, category_uuid, scope_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_subject_category_assignment_dict(self, user_name, intra_extension_uuid, subject_uuid, category_uuid, scope_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_object_category_assignment_dict(self, user_name, intra_extension_uuid, object_uuid, assignment_dict):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_object_category_assignment(self, user_name, intra_extension_uuid, object_uuid, category_uuid, scope_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_object_category_assignment_dict(self, user_name, intra_extension_uuid, object_uuid, category_uuid, scope_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_action_category_assignment_dict(self, user_name, intra_extension_uuid, action_uuid, assignment_dict):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_action_category_assignment(self, user_name, intra_extension_uuid, action_uuid, category_uuid, scope_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def add_action_category_assignment_dict(self, user_name, intra_extension_uuid, action_uuid, category_uuid, scope_uuid):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_aggregation_algorithm(self, user_name, intra_extension_uuid, aggregation_algorithm):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_sub_meta_rule(self, user_name, intra_extension_uuid, sub_meta_rules):
        raise AuthIntraExtensionModificationNotAuthorized()

    def set_sub_rule(self, user_name, intra_extension_uuid, relation, sub_rule):
        raise AuthIntraExtensionModificationNotAuthorized()

    def del_sub_rule(self, user_name, intra_extension_uuid, relation_name, rule):
        raise AuthIntraExtensionModificationNotAuthorized()
+
@dependency.provider('admin_api')
@dependency.requires('identity_api', 'moonlog_api', 'tenant_api')
class IntraExtensionAdminManager(IntraExtensionManager):
    """Manager for "admin" IntraExtensions.

    Inherits the full read/write behaviour of IntraExtensionManager; only
    __genre__ differs, selecting the admin policy tree when the extension
    is loaded.
    """
    # A large block of commented-out override code (perimeter validation and
    # subject mutators raising AdminIntraExtensionModificationNotAuthorized)
    # was removed as dead code; restore from VCS history if ever needed.

    __genre__ = "admin"
+
+
class AuthzDriver(object):
    """Abstract driver interface for authorization-time lookups.

    Every method only raises exception.NotImplemented here; concrete
    backends must override all of them.
    """

    def get_subject_category_list(self, extension_uuid):
        raise exception.NotImplemented() # pragma: no cover

    def get_object_category_list(self, extension_uuid):
        raise exception.NotImplemented() # pragma: no cover

    def get_action_category_list(self, extension_uuid):
        raise exception.NotImplemented() # pragma: no cover

    def get_subject_category_value_dict(self, extension_uuid, subject_uuid):
        raise exception.NotImplemented() # pragma: no cover

    def get_object_category_value_dict(self, extension_uuid, object_uuid):
        raise exception.NotImplemented() # pragma: no cover

    def get_action_category_value_dict(self, extension_uuid, action_uuid):
        raise exception.NotImplemented() # pragma: no cover

    def get_meta_rule(self, extension_uuid):
        raise exception.NotImplemented() # pragma: no cover

    def get_rules(self, extension_uuid):
        raise exception.NotImplemented() # pragma: no cover
+
+
class UpdateDriver(object):
    """Abstract driver interface for IntraExtension lifecycle and attributes.

    Every method only raises exception.NotImplemented here; concrete
    backends must override all of them.
    """

    def get_intra_extensions(self):
        raise exception.NotImplemented() # pragma: no cover

    def get_intra_extension(self, extension_uuid):
        raise exception.NotImplemented() # pragma: no cover

    def create_intra_extensions(self, extension_uuid, intra_extension):
        raise exception.NotImplemented() # pragma: no cover

    def delete_intra_extensions(self, extension_uuid):
        raise exception.NotImplemented() # pragma: no cover

    # Getter and setter for tenant

    def get_tenant(self, uuid):
        raise exception.NotImplemented() # pragma: no cover

    def set_tenant(self, uuid, tenant_id):
        raise exception.NotImplemented() # pragma: no cover

    # Getter and setter for name

    def get_name(self, uuid):
        raise exception.NotImplemented() # pragma: no cover

    def set_name(self, uuid, name):
        raise exception.NotImplemented() # pragma: no cover

    # Getter and setter for model

    def get_model(self, uuid):
        raise exception.NotImplemented() # pragma: no cover

    def set_model(self, uuid, model):
        raise exception.NotImplemented() # pragma: no cover

    # Getter and setter for genre

    def get_genre(self, uuid):
        raise exception.NotImplemented() # pragma: no cover

    def set_genre(self, uuid, genre):
        raise exception.NotImplemented() # pragma: no cover

    # Getter and setter for description

    def get_description(self, uuid):
        raise exception.NotImplemented() # pragma: no cover

    def set_description(self, uuid, args):
        raise exception.NotImplemented() # pragma: no cover
+
+
+class IntraExtensionDriver(object):
+
    # Getter and Setter for subjects
+
    def get_subject_dict(self, extension_uuid):
        """Get the list of subjects for that IntraExtension

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :return: a dictionary containing all subjects for that IntraExtension, eg. {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def set_subject_dict(self, extension_uuid, subject_dict):
        """Set the list of subjects for that IntraExtension

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param subject_dict: dict of subject: {"uuid1": "name1", "uuid2": "name2"}
        :type subject_dict: dict
        :return: a dictionary containing all subjects for that IntraExtension, eg. {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def add_subject(self, extension_uuid, subject_uuid, subject_name):
        """Add a subject

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param subject_uuid: Subject UUID
        :type subject_uuid: string
        :param subject_name: Subject name
        :type subject_name: string
        :return: the added subject {"uuid1": "name1"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def remove_subject(self, extension_uuid, subject_uuid):
        """Remove a subject

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param subject_uuid: Subject UUID
        :type subject_uuid: string
        :return: None

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    # Getter and Setter for objects
+
    def get_object_dict(self, extension_uuid):
        """Get the list of objects for that IntraExtension

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :return: a dictionary containing all objects for that IntraExtension, eg. {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def set_object_dict(self, extension_uuid, object_dict):
        """Set the list of objects for that IntraExtension

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param object_dict: dict of object: {"uuid1": "name1", "uuid2": "name2"}
        :type object_dict: dict
        :return: a dictionary containing all objects for that IntraExtension, eg. {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def add_object(self, extension_uuid, object_uuid, object_name):
        """Add an object

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param object_uuid: Object UUID
        :type object_uuid: string
        :param object_name: Object name
        :type object_name: string
        :return: the added object {"uuid1": "name1"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def remove_object(self, extension_uuid, object_uuid):
        """Remove an object

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param object_uuid: Object UUID
        :type object_uuid: string
        :return: None

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    # Getter and Setter for actions
+
    def get_action_dict(self, extension_uuid):
        """Get the list of actions for that IntraExtension

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :return: a dictionary containing all actions for that IntraExtension, eg. {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def set_action_dict(self, extension_uuid, action_dict):
        """Set the list of actions for that IntraExtension

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param action_dict: dict of actions: {"uuid1": "name1", "uuid2": "name2"}
        :type action_dict: dict
        :return: a dictionary containing all actions for that IntraExtension, eg. {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def add_action(self, extension_uuid, action_uuid, action_name):
        """Add an action

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param action_uuid: Action UUID
        :type action_uuid: string
        :param action_name: Action name
        :type action_name: string
        :return: the added action {"uuid1": "name1"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def remove_action(self, extension_uuid, action_uuid):
        """Remove an action

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param action_uuid: Action UUID
        :type action_uuid: string
        :return: None

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    # Getter and Setter for subject_category
+
    def get_subject_category_dict(self, extension_uuid):
        """Get a list of all subject categories

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :return: a dictionary containing all subject categories {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def set_subject_category_dict(self, extension_uuid, subject_categories):
        """Set the list of all subject categories

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param subject_categories: dict of subject categories {"uuid1": "name1", "uuid2": "name2"}
        :type subject_categories: dict
        :return: a dictionary containing all subject categories {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def add_subject_category_dict(self, extension_uuid, subject_category_uuid, subject_category_name):
        """Add a subject category

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param subject_category_uuid: the UUID of the subject category
        :type subject_category_uuid: string
        :param subject_category_name: the name of the subject category
        :type subject_category_name: string
        :return: a dictionary with the subject category added {"uuid1": "name1"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def remove_subject_category(self, extension_uuid, subject_category_uuid):
        """Remove one subject category

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param subject_category_uuid: the UUID of subject category to remove
        :type subject_category_uuid: string
        :return: a dictionary containing all subject categories {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    # Getter and Setter for object_category
+
    def get_object_category_dict(self, extension_uuid):
        """Get a list of all object categories

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :return: a dictionary containing all object categories {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def set_object_category_dict(self, extension_uuid, object_categories):
        """Set the list of all object categories

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param object_categories: dict of object categories {"uuid1": "name1", "uuid2": "name2"}
        :type object_categories: dict
        :return: a dictionary containing all object categories {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def add_object_category_dict(self, extension_uuid, object_category_uuid, object_category_name):
        """Add an object category

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param object_category_uuid: the UUID of the object category
        :type object_category_uuid: string
        :param object_category_name: the name of the object category
        :type object_category_name: string
        :return: a dictionary with the object category added {"uuid1": "name1"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def remove_object_category(self, extension_uuid, object_category_uuid):
        """Remove one object category

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param object_category_uuid: the UUID of object category to remove
        :type object_category_uuid: string
        :return: a dictionary containing all object categories {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    # Getter and Setter for action_category
+
    def get_action_category_dict(self, extension_uuid):
        """Get a list of all action categories

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :return: a dictionary containing all action categories {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def set_action_category_dict(self, extension_uuid, action_categories):
        """Set the list of all action categories

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param action_categories: dict of action categories {"uuid1": "name1", "uuid2": "name2"}
        :type action_categories: dict
        :return: a dictionary containing all action categories {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def add_action_category_dict(self, extension_uuid, action_category_uuid, action_category_name):
        """Add an action category

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param action_category_uuid: the UUID of the action category
        :type action_category_uuid: string
        :param action_category_name: the name of the action category
        :type action_category_name: string
        :return: a dictionary with the action category added {"uuid1": "name1"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def remove_action_category(self, extension_uuid, action_category_uuid):
        """Remove one action category

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param action_category_uuid: the UUID of action category to remove
        :type action_category_uuid: string
        :return: a dictionary containing all action categories {"uuid1": "name1", "uuid2": "name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and Setter for subject_category_value_scope
+
    def get_subject_category_scope_dict(self, extension_uuid, category):
        """Get a list of all subject category scope

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param category: the category UUID where the scope values are
        :type category: string
        :return: a dictionary containing all subject category scope {"category1": {"scope_uuid1": "scope_name1"}}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def set_subject_category_scope_dict(self, extension_uuid, subject_category, scope):
        """Set the list of all scope for that subject category

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param subject_category: the UUID of the subject category where this scope will be set
        :type subject_category: string
        :param scope: the scope values to store for that category
        :type scope: dict
        :return: a dictionary containing all scope {"scope_uuid1": "scope_name1", "scope_uuid2": "scope_name2"}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
    def add_subject_category_scope_dict(self, extension_uuid, subject_category, scope_uuid, scope_name):
        """Add a scope value to a subject category

        :param extension_uuid: IntraExtension UUID
        :type extension_uuid: string
        :param subject_category: the subject category UUID where the scope will be added
        :type subject_category: string
        :param scope_uuid: the UUID of the scope value
        :type scope_uuid: string
        :param scope_name: the name of the scope value
        :type scope_name: string
        :return: a dictionary containing the subject category scope added {"category1": {"scope_uuid1": "scope_name1"}}

        Concrete drivers must override this method.
        """
        raise exception.NotImplemented() # pragma: no cover
+
+ def remove_subject_category_scope_dict(self, extension_uuid, subject_category, scope_uuid):
+ """Remove one scope belonging to a subject category
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+        :param subject_category: the UUID of the subject category where we can find the scope to remove
+ :type subject_category: string
+ :param scope_uuid: the UUID of the scope to remove
+ :type scope_uuid: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and Setter for object_category_scope
+
+ def get_object_category_scope_dict(self, extension_uuid, category):
+ """Get a list of all object category scope
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param category: the category UUID where the scope values are
+ :type category: string
+ :return: a dictionary containing all object category scope {"category1": {"scope_uuid1": "scope_name1}}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def set_object_category_scope_dict(self, extension_uuid, object_category, scope):
+ """Set the list of all scope for that object category
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param object_category: the UUID of the object category where this scope will be set
+ :type object_category: string
+ :return: a dictionary containing all scope {"scope_uuid1": "scope_name1, "scope_uuid2": "scope_name2}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def add_object_category_scope_dict(self, extension_uuid, object_category, scope_uuid, scope_name):
+        """Add a scope to an object category
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param object_category: the object category UUID where the scope will be added
+ :type object_category: string
+        :param scope_uuid: the UUID of the scope to add
+        :type scope_uuid: string
+        :param scope_name: the name of the scope to add
+        :type scope_name: string
+ :return: a dictionary containing the object category scope added {"category1": {"scope_uuid1": "scope_name1}}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def remove_object_category_scope_dict(self, extension_uuid, object_category, scope_uuid):
+ """Remove one scope belonging to a object category
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+        :param object_category: the UUID of the object category where we can find the scope to remove
+ :type object_category: string
+ :param scope_uuid: the UUID of the scope to remove
+ :type scope_uuid: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and Setter for action_category_scope
+
+ def get_action_category_scope_dict(self, extension_uuid, category):
+ """Get a list of all action category scope
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param category: the category UUID where the scope values are
+ :type category: string
+ :return: a dictionary containing all action category scope {"category1": {"scope_uuid1": "scope_name1}}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def set_action_category_scope_dict(self, extension_uuid, action_category, scope):
+ """Set the list of all scope for that action category
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param action_category: the UUID of the action category where this scope will be set
+ :type action_category: string
+ :return: a dictionary containing all scope {"scope_uuid1": "scope_name1, "scope_uuid2": "scope_name2}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def add_action_category_scope_dict(self, extension_uuid, action_category, scope_uuid, scope_name):
+        """Add a scope to an action category
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param action_category: the action category UUID where the scope will be added
+ :type action_category: string
+        :param scope_uuid: the UUID of the scope to add
+        :type scope_uuid: string
+        :param scope_name: the name of the scope to add
+        :type scope_name: string
+ :return: a dictionary containing the action category scope added {"category1": {"scope_uuid1": "scope_name1}}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def remove_action_category_scope_dict(self, extension_uuid, action_category, scope_uuid):
+ """Remove one scope belonging to a action category
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+        :param action_category: the UUID of the action category where we can find the scope to remove
+ :type action_category: string
+ :param scope_uuid: the UUID of the scope to remove
+ :type scope_uuid: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and Setter for subject_category_assignment
+
+ def get_subject_category_assignment_dict(self, extension_uuid, subject_uuid):
+ """Get the assignment for a given subject_uuid
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param subject_uuid: subject UUID
+ :type subject_uuid: string
+ :return: a dictionary of assignment for the given subject {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def set_subject_category_assignment_dict(self, extension_uuid, subject_uuid, assignment_dict):
+ """Set the assignment for a given subject_uuid
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param subject_uuid: subject UUID
+ :type subject_uuid: string
+ :param assignment_dict: the assignment dictionary {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ :type assignment_dict: dict
+ :return: a dictionary of assignment for the given subject {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def add_subject_category_assignment_dict(self, extension_uuid, subject_uuid, category_uuid, scope_uuid):
+ """Add a scope to a category and to a subject
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param subject_uuid: the subject UUID
+ :type subject_uuid: string
+ :param category_uuid: the category UUID
+ :type category_uuid: string
+ :param scope_uuid: the scope UUID
+ :type scope_uuid: string
+ :return: a dictionary of assignment for the given subject {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def remove_subject_category_assignment(self, extension_uuid, subject_uuid, category_uuid, scope_uuid):
+ """Remove a scope from a category and from a subject
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param subject_uuid: the subject UUID
+ :type subject_uuid: string
+ :param category_uuid: the category UUID
+ :type category_uuid: string
+ :param scope_uuid: the scope UUID
+ :type scope_uuid: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and Setter for object_category_assignment
+
+ def get_object_category_assignment_dict(self, extension_uuid, object_uuid):
+ """Get the assignment for a given object_uuid
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param object_uuid: object UUID
+ :type object_uuid: string
+ :return: a dictionary of assignment for the given object {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def set_object_category_assignment_dict(self, extension_uuid, object_uuid, assignment_dict):
+ """Set the assignment for a given object_uuid
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param object_uuid: object UUID
+ :type object_uuid: string
+ :param assignment_dict: the assignment dictionary {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ :type assignment_dict: dict
+ :return: a dictionary of assignment for the given object {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def add_object_category_assignment_dict(self, extension_uuid, object_uuid, category_uuid, scope_uuid):
+ """Add a scope to a category and to a object
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param object_uuid: the object UUID
+ :type object_uuid: string
+ :param category_uuid: the category UUID
+ :type category_uuid: string
+ :param scope_uuid: the scope UUID
+ :type scope_uuid: string
+ :return: a dictionary of assignment for the given object {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def remove_object_category_assignment(self, extension_uuid, object_uuid, category_uuid, scope_uuid):
+ """Remove a scope from a category and from a object
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param object_uuid: the object UUID
+ :type object_uuid: string
+ :param category_uuid: the category UUID
+ :type category_uuid: string
+ :param scope_uuid: the scope UUID
+ :type scope_uuid: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and Setter for action_category_assignment
+
+ def get_action_category_assignment_dict(self, extension_uuid, action_uuid):
+ """Get the assignment for a given action_uuid
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param action_uuid: action UUID
+ :type action_uuid: string
+ :return: a dictionary of assignment for the given action {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def set_action_category_assignment_dict(self, extension_uuid, action_uuid, assignment_dict):
+ """Set the assignment for a given action_uuid
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param action_uuid: action UUID
+ :type action_uuid: string
+ :param assignment_dict: the assignment dictionary {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ :type assignment_dict: dict
+ :return: a dictionary of assignment for the given action {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def add_action_category_assignment_dict(self, extension_uuid, action_uuid, category_uuid, scope_uuid):
+ """Add a scope to a category and to a action
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param action_uuid: the action UUID
+ :type action_uuid: string
+ :param category_uuid: the category UUID
+ :type category_uuid: string
+ :param scope_uuid: the scope UUID
+ :type scope_uuid: string
+ :return: a dictionary of assignment for the given action {"cat1": ["scope_uuid1", "scope_uuid2"]}
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def remove_action_category_assignment(self, extension_uuid, action_uuid, category_uuid, scope_uuid):
+ """Remove a scope from a category and from a action
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param action_uuid: the action UUID
+ :type action_uuid: string
+ :param category_uuid: the category UUID
+ :type category_uuid: string
+ :param scope_uuid: the scope UUID
+ :type scope_uuid: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and Setter for meta_rule
+
+ def get_meta_rule_dict(self, extension_uuid):
+ """Get the Meta rule
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :return: a dictionary containing the meta_rule
+
+ Here is an example of a meta_rule:
+ {
+ "sub_meta_rules": {
+ "relation_super": {
+ "subject_categories": ["role"],
+ "action_categories": ["computing_action"],
+ "object_categories": ["id"],
+ "relation": "relation_super"
+ }
+ },
+ "aggregation": "and_true_aggregation"
+ }
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def set_meta_rule_dict(self, extension_uuid, meta_rule):
+ """Set the Meta rule
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param meta_rule: a dictionary representing the meta_rule (see below)
+        :return: a dictionary containing the meta_rule
+
+ Here is an example of a meta_rule:
+ {
+ "sub_meta_rules": {
+ "relation_super": {
+ "subject_categories": ["role"],
+ "action_categories": ["computing_action"],
+ "object_categories": ["id"],
+ "relation": "relation_super"
+ }
+ },
+ "aggregation": "and_true_aggregation"
+ }
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and Setter for rules
+
+ def get_rules(self, extension_uuid):
+ """Get all rules
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :return: a dictionary containing rules ie.
+ {
+ "relation_super":[
+ ["admin", "vm_admin", "servers", True],
+ ["admin", "vm_access", "servers", True]
+ ]
+ }
+ All items will be UUID.
+ The last boolean item is the positive/negative value. If True, request that conforms to that rule
+ will be authorized, if false, request will be rejected.
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def set_rules(self, extension_uuid, rules):
+ """Set all rules
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :param rules: a dictionary containing rules (see below)
+ :type rules: dict
+ :return: a dictionary containing rules ie.
+ {
+ "relation_super":[
+ ["admin", "vm_admin", "servers", True],
+ ["admin", "vm_access", "servers", True]
+ ]
+ }
+ All items will be UUID.
+ The last boolean item is the positive/negative value. If True, request that conforms to that rule
+ will be authorized, if false, request will be rejected.
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ # Getter and Setter for intra_extension
+
+ def get_intra_extension_list(self):
+ """Get a list of IntraExtension UUIDs
+
+ :return: a list of IntraExtension UUIDs ["uuid1", "uuid2"]
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def get_intra_extension_dict(self, extension_uuid):
+ """Get a description of an IntraExtension
+
+ :param extension_uuid: the UUID of the IntraExtension
+ :type extension_uuid: string
+ :return:
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def set_intra_extension(self, extension_uuid, extension_dict):
+ """Set a new IntraExtension
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+        :param extension_dict: a dictionary with the description of the IntraExtension (see below)
+ :type extension_dict: dict
+ :return: the IntraExtension dictionary, example:
+ {
+ "id": "uuid1",
+ "name": "Name of the intra_extension",
+            "model": "Model of the intra_extension (admin or authz)"
+ "description": "a description of the intra_extension"
+ }
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def delete_intra_extension(self, extension_uuid):
+ """Delete an IntraExtension
+
+ :param extension_uuid: IntraExtension UUID
+ :type extension_uuid: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def get_sub_meta_rule_relations(self, username, uuid):
+ # TODO: check which relations are really usable
+ return {"sub_meta_rule_relations": ["relation_super", "relation_test"]}
+
+
+class LogDriver(object):
+
+ def authz(self, message):
+ """Log authorization message
+
+ :param message: the message to log
+ :type message: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def debug(self, message):
+ """Log debug message
+
+ :param message: the message to log
+ :type message: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def info(self, message):
+ """Log informational message
+
+ :param message: the message to log
+ :type message: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def warning(self, message):
+ """Log warning message
+
+ :param message: the message to log
+ :type message: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def error(self, message):
+ """Log error message
+
+ :param message: the message to log
+ :type message: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def critical(self, message):
+ """Log critical message
+
+ :param message: the message to log
+ :type message: string
+ :return: None
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+ def get_logs(self, options):
+ """Get logs
+
+ :param options: options to filter log events
+ :type options: string eg: "event_number=10,from=2014-01-01-10:10:10,to=2014-01-01-12:10:10,filter=expression"
+ :return: a list of log events
+
+ TIME_FORMAT is '%Y-%m-%d-%H:%M:%S'
+ """
+ raise exception.NotImplemented() # pragma: no cover
+
+# @dependency.provider('superextension_api')
+# class SuperExtensionManager(manager.Manager):
+#
+# def __init__(self):
+# driver = CONF.moon.superextension_driver
+# super(SuperExtensionManager, self).__init__(driver)
+#
+# def authz(self, sub, obj, act):
+# #return self.driver.admin(sub, obj, act)
+# return True
+
+
+# @dependency.provider('interextension_api')
+# @dependency.requires('identity_api')
+# class InterExtensionManager(manager.Manager):
+#
+# def __init__(self):
+# driver = CONF.moon.interextension_driver
+# super(InterExtensionManager, self).__init__(driver)
+#
+# def check_inter_extension(self, uuid):
+# if uuid not in self.get_inter_extensions():
+# LOG.error("Unknown InterExtension {}".format(uuid))
+# raise exception.NotFound("InterExtension not found.")
+#
+# def get_inter_extensions(self):
+# return self.driver.get_inter_extensions()
+#
+# def get_inter_extension(self, uuid):
+# return self.driver.get_inter_extension(uuid)
+#
+# def create_inter_extension(self, inter_extension):
+# ie = dict()
+# ie['id'] = uuid4().hex
+# ie["requesting_intra_extension_uuid"] = filter_input(inter_extension["requesting_intra_extension_uuid"])
+# ie["requested_intra_extension_uuid"] = filter_input(inter_extension["requested_intra_extension_uuid"])
+# ie["description"] = filter_input(inter_extension["description"])
+# ie["virtual_entity_uuid"] = filter_input(inter_extension["virtual_entity_uuid"])
+# ie["genre"] = filter_input(inter_extension["genre"])
+#
+# ref = self.driver.create_inter_extensions(ie['id'], ie)
+# return ref
+#
+# def delete_inter_extension(self, inter_extension_id):
+# LOG.error("Deleting {}".format(inter_extension_id))
+# ref = self.driver.delete_inter_extensions(inter_extension_id)
+# return ref
+#
+#
+# class SuperExtensionDriver(object):
+#
+# def __init__(self):
+# self.__super_extension = None
+#
+# def admin(self, sub, obj, act):
+# return self.__super_extension.authz(sub, obj, act)
+#
+# def delegate(self, delegating_uuid, delegated_uuid, privilege): # TODO later
+# pass
+#
+# # Getter and Setter for SuperExtensions
+#
+# def get_super_extensions(self):
+# raise exception.NotImplemented() # pragma: no cover
+#
+# def create_super_extensions(self, super_id, super_extension):
+# raise exception.NotImplemented() # pragma: no cover
+#
+#
+# class InterExtensionDriver(object):
+#
+# # Getter and Setter for InterExtensions
+#
+# def get_inter_extensions(self):
+# raise exception.NotImplemented() # pragma: no cover
+#
+# def get_inter_extension(self, uuid):
+# raise exception.NotImplemented() # pragma: no cover
+#
+# def create_inter_extensions(self, intra_id, intra_extension):
+# raise exception.NotImplemented() # pragma: no cover
+#
+# def delete_inter_extensions(self, intra_extension_id):
+# raise exception.NotImplemented() # pragma: no cover
+#
+#
+# class VirtualEntityDriver(object):
+#
+# # Getter and Setter for InterExtensions
+#
+# def get_virtual_entities(self):
+# raise exception.NotImplemented() # pragma: no cover
+#
+# def create_virtual_entities(self, ve_id, virtual_entity):
+# raise exception.NotImplemented() # pragma: no cover
+
diff --git a/keystone-moon/keystone/contrib/moon/exception.py b/keystone-moon/keystone/contrib/moon/exception.py
new file mode 100644
index 00000000..20a7d737
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/exception.py
@@ -0,0 +1,112 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+from keystone.common import dependency
+from keystone.exception import Error
+from keystone.i18n import _, _LW
+
+@dependency.requires('moonlog_api')
+class TenantError(Error):
+ message_format = _("There is an error requesting this tenant"
+ " the server could not comply with the request"
+ " since it is either malformed or otherwise"
+ " incorrect. The client is assumed to be in error.")
+ code = 400
+ title = 'Tenant Error'
+ logger = "ERROR"
+
+ def __del__(self):
+ if self.logger == "ERROR":
+ self.moonlog_api.error(self.message_format)
+ elif self.logger == "WARNING":
+ self.moonlog_api.warning(self.message_format)
+ elif self.logger == "CRITICAL":
+ self.moonlog_api.critical(self.message_format)
+ elif self.logger == "AUTHZ":
+ self.moonlog_api.authz(self.message_format)
+ self.moonlog_api.error(self.message_format)
+ else:
+ self.moonlog_api.info(self.message_format)
+
+
+
+class TenantListEmptyError(TenantError):
+ message_format = _("The tenant list mapping is empty, you must set the mapping first.")
+ code = 400
+ title = 'Tenant List Empty Error'
+
+
+class TenantNotFoundError(TenantError):
+ message_format = _("The tenant UUID was not found.")
+ code = 400
+ title = 'Tenant UUID Not Found Error'
+
+
+class IntraExtensionError(TenantError):
+ message_format = _("There is an error requesting this IntraExtension.")
+ code = 400
+ title = 'Extension Error'
+
+
+class CategoryNotFound(IntraExtensionError):
+ message_format = _("The category is unknown.")
+ code = 400
+ title = 'Extension Error'
+ logger = "WARNING"
+
+
+class IntraExtensionUnMapped(TenantError):
+ message_format = _("The Extension is not mapped to a tenant.")
+ code = 400
+ title = 'Extension UUID Not Found Error'
+ logger = "WARNING"
+
+
+class IntraExtensionNotFound(IntraExtensionError):
+ message_format = _("The Extension for that tenant is unknown.")
+ code = 400
+ title = 'Extension UUID Not Found Error'
+ logger = "WARNING"
+
+
+class IntraExtensionNotAuthorized(IntraExtensionError):
+ message_format = _("User has no authorization for that action.")
+ code = 400
+ title = 'Authorization Error'
+ logger = "AUTHZ"
+
+
+class AdminIntraExtensionNotFound(IntraExtensionNotFound):
+ message_format = _("The admin Extension for that tenant is unknown.")
+ code = 400
+ title = 'Admin Extension UUID Not Found Error'
+ logger = "WARNING"
+
+
+class AdminIntraExtensionCreationError(IntraExtensionError):
+ message_format = _("The arguments for the creation of this admin Extension were malformed.")
+ code = 400
+ title = 'Admin Extension Creation Error'
+
+
+class AdminIntraExtensionModificationNotAuthorized(IntraExtensionError):
+ message_format = _("The modification of this admin Extension is not authorizaed.")
+ code = 400
+ title = 'Admin Extension Creation Error'
+ logger = "AUTHZ"
+
+class AuthIntraExtensionModificationNotAuthorized(IntraExtensionError):
+ message_format = _("The modification of this authz Extension is not authorizaed.")
+ code = 400
+ title = 'Authz Extension Creation Error'
+ logger = "AUTHZ"
+
+
+class AuthzIntraExtensionNotFound(IntraExtensionNotFound):
+ message_format = _("The authz Extension for that tenant is unknown.")
+ code = 400
+ title = 'Authz Extension UUID Not Found Error'
+ logger = "WARNING"
+
diff --git a/keystone-moon/keystone/contrib/moon/extension.py b/keystone-moon/keystone/contrib/moon/extension.py
new file mode 100644
index 00000000..efee55c5
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/extension.py
@@ -0,0 +1,740 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import os.path
+import copy
+import json
+import itertools
+from uuid import uuid4
+import logging
+
+LOG = logging.getLogger("moon.authz")
+
+
+class Metadata:
+
+ def __init__(self):
+ self.__name = ''
+ self.__model = ''
+ self.__genre = ''
+ self.__description = ''
+ self.__subject_categories = list()
+ self.__object_categories = list()
+ self.__meta_rule = dict()
+ self.__meta_rule['sub_meta_rules'] = list()
+ self.__meta_rule['aggregation'] = ''
+
+ def load_from_json(self, extension_setting_dir):
+ metadata_path = os.path.join(extension_setting_dir, 'metadata.json')
+ f = open(metadata_path)
+ json_metadata = json.load(f)
+ self.__name = json_metadata['name']
+ self.__model = json_metadata['model']
+ self.__genre = json_metadata['genre']
+ self.__description = json_metadata['description']
+ self.__subject_categories = copy.deepcopy(json_metadata['subject_categories'])
+ self.__object_categories = copy.deepcopy(json_metadata['object_categories'])
+ self.__meta_rule = copy.deepcopy(json_metadata['meta_rule'])
+
+ def get_name(self):
+ return self.__name
+
+ def get_genre(self):
+ return self.__genre
+
+ def get_model(self):
+ return self.__model
+
+ def get_subject_categories(self):
+ return self.__subject_categories
+
+ def get_object_categories(self):
+ return self.__object_categories
+
+ def get_meta_rule(self):
+ return self.__meta_rule
+
+ def get_meta_rule_aggregation(self):
+ return self.__meta_rule['aggregation']
+
+ def get_data(self):
+ data = dict()
+ data["name"] = self.get_name()
+ data["model"] = self.__model
+ data["genre"] = self.__genre
+ data["description"] = self.__description
+ data["subject_categories"] = self.get_subject_categories()
+ data["object_categories"] = self.get_object_categories()
+ data["meta_rule"] = dict(self.get_meta_rule())
+ return data
+
+ def set_data(self, data):
+ self.__name = data["name"]
+ self.__model = data["model"]
+ self.__genre = data["genre"]
+ self.__description = data["description"]
+ self.__subject_categories = list(data["subject_categories"])
+ self.__object_categories = list(data["object_categories"])
+ self.__meta_rule = dict(data["meta_rule"])
+
+
+class Configuration:
+ def __init__(self):
+ self.__subject_category_values = dict()
+ # examples: { "role": {"admin", "dev", }, }
+ self.__object_category_values = dict()
+ self.__rules = list()
+
+ def load_from_json(self, extension_setting_dir):
+ configuration_path = os.path.join(extension_setting_dir, 'configuration.json')
+ f = open(configuration_path)
+ json_configuration = json.load(f)
+ self.__subject_category_values = copy.deepcopy(json_configuration['subject_category_values'])
+ self.__object_category_values = copy.deepcopy(json_configuration['object_category_values'])
+ self.__rules = copy.deepcopy(json_configuration['rules']) # TODO: currently a list, will be a dict with sub-meta-rule as key
+
+ def get_subject_category_values(self):
+ return self.__subject_category_values
+
+ def get_object_category_values(self):
+ return self.__object_category_values
+
+ def get_rules(self):
+ return self.__rules
+
+ def get_data(self):
+ data = dict()
+ data["subject_category_values"] = self.get_subject_category_values()
+ data["object_category_values"] = self.get_object_category_values()
+ data["rules"] = self.get_rules()
+ return data
+
+ def set_data(self, data):
+ self.__subject_category_values = list(data["subject_category_values"])
+ self.__object_category_values = list(data["object_category_values"])
+ self.__rules = list(data["rules"])
+
+
+class Perimeter:
+ def __init__(self):
+ self.__subjects = list()
+ self.__objects = list()
+
+ def load_from_json(self, extension_setting_dir):
+ perimeter_path = os.path.join(extension_setting_dir, 'perimeter.json')
+ f = open(perimeter_path)
+ json_perimeter = json.load(f)
+ self.__subjects = copy.deepcopy(json_perimeter['subjects'])
+ self.__objects = copy.deepcopy(json_perimeter['objects'])
+ # print(self.__subjects)
+ # print(self.__objects)
+
+ def get_subjects(self):
+ return self.__subjects
+
+ def get_objects(self):
+ return self.__objects
+
+ def get_data(self):
+ data = dict()
+ data["subjects"] = self.get_subjects()
+ data["objects"] = self.get_objects()
+ return data
+
+ def set_data(self, data):
+ self.__subjects = list(data["subjects"])
+ self.__objects = list(data["objects"])
+
+
+class Assignment:
+ def __init__(self):
+ self.__subject_category_assignments = dict()
+ # examples: { "role": {"user1": {"dev"}, "user2": {"admin",}}, } TODO: limit to one value for each attr
+ self.__object_category_assignments = dict()
+
+ def load_from_json(self, extension_setting_dir):
+ assignment_path = os.path.join(extension_setting_dir, 'assignment.json')
+ f = open(assignment_path)
+ json_assignment = json.load(f)
+
+ self.__subject_category_assignments = dict(copy.deepcopy(json_assignment['subject_category_assignments']))
+ self.__object_category_assignments = dict(copy.deepcopy(json_assignment['object_category_assignments']))
+
+ def get_subject_category_assignments(self):
+ return self.__subject_category_assignments
+
+ def get_object_category_assignments(self):
+ return self.__object_category_assignments
+
+ def get_data(self):
+ data = dict()
+ data["subject_category_assignments"] = self.get_subject_category_assignments()
+ data["object_category_assignments"] = self.get_object_category_assignments()
+ return data
+
+ def set_data(self, data):
+ self.__subject_category_assignments = list(data["subject_category_assignments"])
+ self.__object_category_assignments = list(data["object_category_assignments"])
+
+
+class AuthzData:
+ def __init__(self, sub, obj, act):
+ self.validation = "False" # "OK, KO, Out of Scope" # "auth": False,
+ self.subject = sub
+ self.object = str(obj)
+ self.action = str(act)
+ self.type = "" # intra-tenant, inter-tenant, Out of Scope
+ self.subject_attrs = dict()
+ self.object_attrs = dict()
+ self.requesting_tenant = "" # "subject_tenant": subject_tenant,
+ self.requested_tenant = "" # "object_tenant": object_tenant,
+
+ def __str__(self):
+ return """AuthzData:
+ validation={}
+ subject={}
+ object={}
+ action={}
+ """.format(self.validation, self.subject, self.object, self.action)
+
+
class Extension:
    """An intra-extension access-control policy engine.

    Aggregates four policy sub-components -- Metadata, Configuration,
    Perimeter and Assignment (declared elsewhere in this module) -- and
    exposes CRUD-style management APIs over them plus the core ``authz()``
    decision function and inter-extension collaboration helpers.

    Error convention: on failure these methods return a ``str`` beginning
    with "[ERROR]"/"[Error]"; on success they return a ``list`` or ``dict``.
    Callers throughout this class discriminate success from failure by
    checking the *type* of the return value.
    """

    def __init__(self):
        self.metadata = Metadata()
        self.configuration = Configuration()
        self.perimeter = Perimeter()
        self.assignment = Assignment()

    def load_from_json(self, extension_setting_dir):
        """Populate all four sub-components from JSON files in the given directory."""
        self.metadata.load_from_json(extension_setting_dir)
        self.configuration.load_from_json(extension_setting_dir)
        self.perimeter.load_from_json(extension_setting_dir)
        self.assignment.load_from_json(extension_setting_dir)

    def get_name(self):
        """Return this extension's name (delegates to metadata)."""
        return self.metadata.get_name()

    def get_genre(self):
        """Return this extension's genre (delegates to metadata)."""
        return self.metadata.get_genre()

    def authz(self, sub, obj, act):
        """Evaluate an authorization request.

        Collects the subject's and object's category attributes, evaluates
        every sub-meta-rule against the configured rules, then aggregates
        the per-relation results.

        Returns the validation string: "OK", "KO", or "Out of Scope"
        (the latter when subject or object is outside the perimeter).
        """
        authz_data = AuthzData(sub, obj, act)
        # authz_logger.warning('extension/authz request: [sub {}, obj {}, act {}]'.format(sub, obj, act))

        if authz_data.subject in self.perimeter.get_subjects() and authz_data.object in self.perimeter.get_objects():

            # Gather the subject's attribute values for every subject category.
            # NOTE(review): raises KeyError if the subject has no assignment
            # for some category -- presumably guaranteed elsewhere; confirm.
            for subject_category in self.metadata.get_subject_categories():
                authz_data.subject_attrs[subject_category] = copy.copy(
                    # self.assignment.get_subject_category_attr(subject_category, sub)
                    self.assignment.get_subject_category_assignments()[subject_category][sub]
                )
                # authz_logger.warning('extension/authz subject attribute: [subject attr: {}]'.format(
                #     self.assignment.get_subject_category_assignments()[subject_category][sub])
                # )

            # Gather object attributes; the pseudo-category 'action' is
            # special-cased to carry the requested action itself.
            for object_category in self.metadata.get_object_categories():
                if object_category == 'action':
                    authz_data.object_attrs[object_category] = [act]
                    # authz_logger.warning('extension/authz object attribute: [object attr: {}]'.format([act]))
                else:
                    authz_data.object_attrs[object_category] = copy.copy(
                        self.assignment.get_object_category_assignments()[object_category][obj]
                    )
                    # authz_logger.warning('extension/authz object attribute: [object attr: {}]'.format(
                    #     self.assignment.get_object_category_assignments()[object_category][obj])
                    # )

            # Per-relation evaluation results, keyed by relation name.
            _aggregation_data = dict()

            for sub_meta_rule in self.metadata.get_meta_rule()["sub_meta_rules"].values():
                _tmp_relation_args = list()

                for sub_subject_category in sub_meta_rule["subject_categories"]:
                    _tmp_relation_args.append(authz_data.subject_attrs[sub_subject_category])

                for sub_object_category in sub_meta_rule["object_categories"]:
                    _tmp_relation_args.append(authz_data.object_attrs[sub_object_category])

                # Cartesian product of all attribute value lists: each tuple
                # is one candidate rule instantiation to look up.
                _relation_args = list(itertools.product(*_tmp_relation_args))

                if sub_meta_rule['relation'] == 'relation_super':  # TODO: replace by Prolog Engine
                    _aggregation_data['relation_super'] = dict()
                    _aggregation_data['relation_super']['result'] = False
                    for _relation_arg in _relation_args:
                        if list(_relation_arg) in self.configuration.get_rules()[sub_meta_rule['relation']]:
                            # authz_logger.warning(
                            #     'extension/authz relation super OK: [sub_sl: {}, obj_sl: {}, action: {}]'.format(
                            #         _relation_arg[0], _relation_arg[1], _relation_arg[2]
                            #     )
                            # )
                            _aggregation_data['relation_super']['result'] = True
                            break
                    _aggregation_data['relation_super']['status'] = 'finished'

                elif sub_meta_rule['relation'] == 'permission':
                    _aggregation_data['permission'] = dict()
                    _aggregation_data['permission']['result'] = False
                    for _relation_arg in _relation_args:
                        if list(_relation_arg) in self.configuration.get_rules()[sub_meta_rule['relation']]:
                            # authz_logger.warning(
                            #     'extension/authz relation permission OK: [role: {}, object: {}, action: {}]'.format(
                            #         _relation_arg[0], _relation_arg[1], _relation_arg[2]
                            #     )
                            # )
                            _aggregation_data['permission']['result'] = True
                            break
                    _aggregation_data['permission']['status'] = 'finished'

            # Aggregation: 'and_true_aggregation' means every finished
            # relation must be True, otherwise the decision flips to "KO".
            if self.metadata.get_meta_rule_aggregation() == 'and_true_aggregation':
                authz_data.validation = "OK"
                for relation in _aggregation_data:
                    # NOTE(review): '== False' kept as-is (PEP 8 would prefer
                    # 'is False' / 'not ...'); behavior unchanged here.
                    if _aggregation_data[relation]['status'] == 'finished' \
                        and _aggregation_data[relation]['result'] == False:
                        authz_data.validation = "KO"
        else:
            authz_data.validation = 'Out of Scope'

        return authz_data.validation

    # ---------------- metadate api ----------------

    def get_subject_categories(self):
        """Return the (mutable) list of subject category ids."""
        return self.metadata.get_subject_categories()

    def add_subject_category(self, category_id):
        """Register a new subject category; returns the list, or an error string."""
        if category_id in self.get_subject_categories():
            return "[ERROR] Add Subject Category: Subject Category Exists"
        else:
            self.get_subject_categories().append(category_id)
            self.configuration.get_subject_category_values()[category_id] = list()
            self.assignment.get_subject_category_assignments()[category_id] = dict()
            return self.get_subject_categories()

    def del_subject_category(self, category_id):
        """Remove a subject category and its values/assignments."""
        if category_id in self.get_subject_categories():
            self.configuration.get_subject_category_values().pop(category_id)
            self.assignment.get_subject_category_assignments().pop(category_id)
            self.get_subject_categories().remove(category_id)
            return self.get_subject_categories()
        else:
            return "[ERROR] Del Subject Category: Subject Category Unknown"

    def get_object_categories(self):
        """Return the (mutable) list of object category ids."""
        return self.metadata.get_object_categories()

    def add_object_category(self, category_id):
        """Register a new object category; returns the list, or an error string."""
        if category_id in self.get_object_categories():
            return "[ERROR] Add Object Category: Object Category Exists"
        else:
            self.get_object_categories().append(category_id)
            self.configuration.get_object_category_values()[category_id] = list()
            self.assignment.get_object_category_assignments()[category_id] = dict()
            return self.get_object_categories()

    def del_object_category(self, category_id):
        """Remove an object category and its values/assignments."""
        if category_id in self.get_object_categories():
            self.configuration.get_object_category_values().pop(category_id)
            self.assignment.get_object_category_assignments().pop(category_id)
            self.get_object_categories().remove(category_id)
            return self.get_object_categories()
        else:
            return "[ERROR] Del Object Category: Object Category Unknown"

    def get_meta_rule(self):
        """Return the meta-rule structure (contains "sub_meta_rules")."""
        return self.metadata.get_meta_rule()

    # ---------------- configuration api ----------------

    def get_subject_category_values(self, category_id):
        return self.configuration.get_subject_category_values()[category_id]

    def add_subject_category_value(self, category_id, category_value):
        """Add one allowed value to a subject category; list on success, str on error."""
        if category_value in self.configuration.get_subject_category_values()[category_id]:
            return "[ERROR] Add Subject Category Value: Subject Category Value Exists"
        else:
            self.configuration.get_subject_category_values()[category_id].append(category_value)
            return self.configuration.get_subject_category_values()[category_id]

    def del_subject_category_value(self, category_id, category_value):
        """Remove one allowed value from a subject category."""
        if category_value in self.configuration.get_subject_category_values()[category_id]:
            self.configuration.get_subject_category_values()[category_id].remove(category_value)
            return self.configuration.get_subject_category_values()[category_id]
        else:
            return "[ERROR] Del Subject Category Value: Subject Category Value Unknown"

    def get_object_category_values(self, category_id):
        return self.configuration.get_object_category_values()[category_id]

    def add_object_category_value(self, category_id, category_value):
        """Add one allowed value to an object category; list on success, str on error."""
        if category_value in self.configuration.get_object_category_values()[category_id]:
            return "[ERROR] Add Object Category Value: Object Category Value Exists"
        else:
            self.configuration.get_object_category_values()[category_id].append(category_value)
            return self.configuration.get_object_category_values()[category_id]

    def del_object_category_value(self, category_id, category_value):
        """Remove one allowed value from an object category."""
        if category_value in self.configuration.get_object_category_values()[category_id]:
            self.configuration.get_object_category_values()[category_id].remove(category_value)
            return self.configuration.get_object_category_values()[category_id]
        else:
            return "[ERROR] Del Object Category Value: Object Category Value Unknown"

    def get_meta_rules(self):
        # Same data as get_meta_rule(); kept for API compatibility.
        return self.metadata.get_meta_rule()

    def _build_rule_from_list(self, relation, rule):
        """Rebuild the nested {sub,obj}_cat_value dicts from a flat rule list.

        Values are popped in the same order add_rule() appended them:
        subject category values first, then object category values.
        """
        rule = list(rule)
        _rule = dict()
        _rule["sub_cat_value"] = dict()
        _rule["obj_cat_value"] = dict()
        if relation in self.metadata.get_meta_rule()["sub_meta_rules"]:
            _rule["sub_cat_value"][relation] = dict()
            _rule["obj_cat_value"][relation] = dict()
            for s_category in self.metadata.get_meta_rule()["sub_meta_rules"][relation]["subject_categories"]:
                _rule["sub_cat_value"][relation][s_category] = rule.pop(0)
            for o_category in self.metadata.get_meta_rule()["sub_meta_rules"][relation]["object_categories"]:
                _rule["obj_cat_value"][relation][o_category] = rule.pop(0)
        return _rule

    def get_rules(self, full=False):
        """Return raw rules, or (full=True) rules expanded into nested dicts.

        NOTE(review): map() here returns a list on Python 2 but a lazy
        iterator on Python 3 -- this module appears to target Python 2
        (see keys()[0] in add_rule); confirm before porting.
        """
        if not full:
            return self.configuration.get_rules()
        rules = dict()
        for key in self.configuration.get_rules():
            rules[key] = map(lambda x: self._build_rule_from_list(key, x), self.configuration.get_rules()[key])
        return rules

    def add_rule(self, sub_cat_value_dict, obj_cat_value_dict):
        """Add one rule per relation from nested category-value dicts.

        Returns a dict describing the added rule on success, an error string
        on validation failure. NOTE(review): the success return fires inside
        the first relation iteration, so only the first relation is ever
        processed when the add succeeds.
        """
        for _relation in self.metadata.get_meta_rule()["sub_meta_rules"]:
            _sub_rule = list()
            for sub_subject_category in self.metadata.get_meta_rule()["sub_meta_rules"][_relation]["subject_categories"]:
                try:
                    if sub_cat_value_dict[_relation][sub_subject_category] \
                        in self.configuration.get_subject_category_values()[sub_subject_category]:
                        _sub_rule.append(sub_cat_value_dict[_relation][sub_subject_category])
                    else:
                        return "[Error] Add Rule: Subject Category Value Unknown"
                except KeyError as e:
                    # DThom: sometimes relation attribute is buggy, I don't know why...
                    # NOTE(review): swallowing the KeyError leaves _sub_rule
                    # incomplete but processing continues -- confirm intended.
                    print(e)

            # BUG: when adding a new category in rules despite it was previously adding
            # data = {
            #     "sub_cat_value":
            #         {"relation_super":
            #             {"subject_security_level": "high", "AMH_CAT": "AMH_VAL"}
            #         },
            #     "obj_cat_value":
            #         {"relation_super":
            #             {"object_security_level": "medium"}
            #         }
            # }
            # traceback = """
            # Traceback (most recent call last):
            #   File "/moon/gui/views_json.py", line 20, in wrapped
            #     result = function(*args, **kwargs)
            #   File "/moon/gui/views_json.py", line 429, in rules
            #     obj_cat_value=filter_input(data["obj_cat_value"]))
            #   File "/usr/local/lib/python2.7/dist-packages/moon/core/pap/core.py", line 380, in add_rule
            #     obj_cat_value)
            #   File "/usr/local/lib/python2.7/dist-packages/moon/core/pdp/extension.py", line 414, in add_rule
            #     if obj_cat_value_dict[_relation][sub_object_category] \
            # KeyError: u'action'
            # """
            for sub_object_category in self.metadata.get_meta_rule()["sub_meta_rules"][_relation]["object_categories"]:
                if obj_cat_value_dict[_relation][sub_object_category] \
                    in self.configuration.get_object_category_values()[sub_object_category]:
                    _sub_rule.append(obj_cat_value_dict[_relation][sub_object_category])
                else:
                    return "[Error] Add Rule: Object Category Value Unknown"

            if _sub_rule in self.configuration.get_rules()[_relation]:
                return "[Error] Add Rule: Rule Exists"
            else:
                self.configuration.get_rules()[_relation].append(_sub_rule)
                # NOTE(review): dict.keys()[0] is Python-2-only; on Python 3
                # keys() is a view and this raises TypeError.
                return {
                    sub_cat_value_dict.keys()[0]: ({
                        "sub_cat_value": copy.deepcopy(sub_cat_value_dict),
                        "obj_cat_value": copy.deepcopy(obj_cat_value_dict)
                    }, )
                }
        return self.configuration.get_rules()

    def del_rule(self, sub_cat_value_dict, obj_cat_value_dict):
        """Delete the rule matching the nested category-value dicts, per relation."""
        for _relation in self.metadata.get_meta_rule()["sub_meta_rules"]:
            _sub_rule = list()
            for sub_subject_category in self.metadata.get_meta_rule()["sub_meta_rules"][_relation]["subject_categories"]:
                _sub_rule.append(sub_cat_value_dict[_relation][sub_subject_category])

            for sub_object_category in self.metadata.get_meta_rule()["sub_meta_rules"][_relation]["object_categories"]:
                _sub_rule.append(obj_cat_value_dict[_relation][sub_object_category])

            if _sub_rule in self.configuration.get_rules()[_relation]:
                self.configuration.get_rules()[_relation].remove(_sub_rule)
            else:
                return "[Error] Del Rule: Rule Unknown"
        return self.configuration.get_rules()

    # ---------------- perimeter api ----------------

    def get_subjects(self):
        return self.perimeter.get_subjects()

    def get_objects(self):
        return self.perimeter.get_objects()

    def add_subject(self, subject_id):
        """Add a subject to the perimeter; list on success, str on error."""
        if subject_id in self.perimeter.get_subjects():
            return "[ERROR] Add Subject: Subject Exists"
        else:
            self.perimeter.get_subjects().append(subject_id)
            return self.perimeter.get_subjects()

    def del_subject(self, subject_id):
        """Remove a subject from the perimeter."""
        if subject_id in self.perimeter.get_subjects():
            self.perimeter.get_subjects().remove(subject_id)
            return self.perimeter.get_subjects()
        else:
            return "[ERROR] Del Subject: Subject Unknown"

    def add_object(self, object_id):
        """Add an object to the perimeter; list on success, str on error."""
        if object_id in self.perimeter.get_objects():
            return "[ERROR] Add Object: Object Exists"
        else:
            self.perimeter.get_objects().append(object_id)
            return self.perimeter.get_objects()

    def del_object(self, object_id):
        """Remove an object from the perimeter."""
        if object_id in self.perimeter.get_objects():
            self.perimeter.get_objects().remove(object_id)
            return self.perimeter.get_objects()
        else:
            return "[ERROR] Del Object: Object Unknown"

    # ---------------- assignment api ----------------

    def get_subject_assignments(self, category_id):
        """Return {subject_id: [values]} for one subject category."""
        if category_id in self.metadata.get_subject_categories():
            return self.assignment.get_subject_category_assignments()[category_id]
        else:
            return "[ERROR] Get Subject Assignment: Subject Category Unknown"

    def add_subject_assignment(self, category_id, subject_id, category_value):
        """Assign a category value to a subject; dict on success, str on error."""
        if category_id in self.metadata.get_subject_categories():
            if subject_id in self.perimeter.get_subjects():
                if category_value in self.configuration.get_subject_category_values()[category_id]:
                    if category_id in self.assignment.get_subject_category_assignments().keys():
                        if subject_id in self.assignment.get_subject_category_assignments()[category_id].keys():
                            if category_value in self.assignment.get_subject_category_assignments()[category_id][subject_id]:
                                return "[ERROR] Add Subject Assignment: Subject Assignment Exists"
                            else:
                                self.assignment.get_subject_category_assignments()[category_id][subject_id].extend([category_value])
                        else:
                            self.assignment.get_subject_category_assignments()[category_id][subject_id] = [category_value]
                    else:
                        self.assignment.get_subject_category_assignments()[category_id] = {subject_id: [category_value]}
                    return self.assignment.get_subject_category_assignments()
                else:
                    return "[ERROR] Add Subject Assignment: Subject Category Value Unknown"
            else:
                return "[ERROR] Add Subject Assignment: Subject Unknown"
        else:
            return "[ERROR] Add Subject Assignment: Subject Category Unknown"

    def del_subject_assignment(self, category_id, subject_id, category_value):
        """Unassign a category value from a subject.

        When it is the subject's last value for the category, the whole
        subject entry is dropped instead of leaving an empty list.
        """
        if category_id in self.metadata.get_subject_categories():
            if subject_id in self.perimeter.get_subjects():
                if category_value in self.configuration.get_subject_category_values()[category_id]:
                    if len(self.assignment.get_subject_category_assignments()[category_id][subject_id]) >= 2:
                        self.assignment.get_subject_category_assignments()[category_id][subject_id].remove(category_value)
                    else:
                        self.assignment.get_subject_category_assignments()[category_id].pop(subject_id)
                    return self.assignment.get_subject_category_assignments()
                else:
                    return "[ERROR] Del Subject Assignment: Assignment Unknown"
            else:
                return "[ERROR] Del Subject Assignment: Subject Unknown"
        else:
            return "[ERROR] Del Subject Assignment: Subject Category Unknown"

    def get_object_assignments(self, category_id):
        """Return {object_id: [values]} for one object category."""
        if category_id in self.metadata.get_object_categories():
            return self.assignment.get_object_category_assignments()[category_id]
        else:
            return "[ERROR] Get Object Assignment: Object Category Unknown"

    def add_object_assignment(self, category_id, object_id, category_value):
        """Assign a category value to an object; dict on success, str on error."""
        if category_id in self.metadata.get_object_categories():
            if object_id in self.perimeter.get_objects():
                if category_value in self.configuration.get_object_category_values()[category_id]:
                    if category_id in self.assignment.get_object_category_assignments().keys():
                        if object_id in self.assignment.get_object_category_assignments()[category_id].keys():
                            if category_value in self.assignment.get_object_category_assignments()[category_id][object_id]:
                                return "[ERROR] Add Object Assignment: Object Assignment Exists"
                            else:
                                self.assignment.get_object_category_assignments()[category_id][object_id].extend([category_value])
                        else:
                            self.assignment.get_object_category_assignments()[category_id][object_id] = [category_value]
                    else:
                        self.assignment.get_object_category_assignments()[category_id] = {object_id: [category_value]}
                    return self.assignment.get_object_category_assignments()
                else:
                    return "[ERROR] Add Object Assignment: Object Category Value Unknown"
            else:
                return "[ERROR] Add Object Assignment: Object Unknown"
        else:
            return "[ERROR] Add Object Assignment: Object Category Unknown"

    def del_object_assignment(self, category_id, object_id, category_value):
        """Unassign a category value from an object (mirrors del_subject_assignment)."""
        if category_id in self.metadata.get_object_categories():
            if object_id in self.perimeter.get_objects():
                if category_value in self.configuration.get_object_category_values()[category_id]:
                    if len(self.assignment.get_object_category_assignments()[category_id][object_id]) >= 2:
                        self.assignment.get_object_category_assignments()[category_id][object_id].remove(category_value)
                    else:
                        self.assignment.get_object_category_assignments()[category_id].pop(object_id)
                    return self.assignment.get_object_category_assignments()
                else:
                    return "[ERROR] Del Object Assignment: Assignment Unknown"
            else:
                return "[ERROR] Del Object Assignment: Object Unknown"
        else:
            return "[ERROR] Del Object Assignment: Object Category Unknown"

    # ---------------- inter-extension API ----------------

    def create_requesting_collaboration(self, sub_list, vent_uuid, act):
        """Grant local subjects access to a virtual entity (requesting side).

        Creates fresh category values (uuid4), assigns them to the subjects
        and to the virtual entity object, then adds the matching rule.
        NOTE(review): _sub_cat_values[_relation] / _obj_cat_values[_relation]
        are overwritten for each category, so only the last category per
        relation survives; and the 'action' branch writes into
        _obj_cat_values[_relation] before that key exists -- likely the
        KeyError documented in add_rule. Confirm against callers.
        """
        _sub_cat_values = dict()
        _obj_cat_values = dict()

        if type(self.add_object(vent_uuid)) is not list:
            return "[Error] Create Requesting Collaboration: No Success"
        for _relation in self.get_meta_rule()["sub_meta_rules"]:
            for _sub_cat_id in self.get_meta_rule()["sub_meta_rules"][_relation]["subject_categories"]:
                _sub_cat_value = str(uuid4())
                if type(self.add_subject_category_value(_sub_cat_id, _sub_cat_value)) is not list:
                    return "[Error] Create Requesting Collaboration: No Success"
                _sub_cat_values[_relation] = {_sub_cat_id: _sub_cat_value}
                for _sub in sub_list:
                    if type(self.add_subject_assignment(_sub_cat_id, _sub, _sub_cat_value)) is not dict:
                        return "[Error] Create Requesting Collaboration: No Success"

            for _obj_cat_id in self.get_meta_rule()["sub_meta_rules"][_relation]["object_categories"]:
                if _obj_cat_id == 'action':
                    _obj_cat_values[_relation][_obj_cat_id] = act
                else:
                    _obj_cat_value = str(uuid4())
                    if type(self.add_object_category_value(_obj_cat_id, _obj_cat_value)) is not list:
                        return "[Error] Create Requesting Collaboration: No Success"
                    if type(self.add_object_assignment(_obj_cat_id, vent_uuid, _obj_cat_value)) is not dict:
                        return "[Error] Create Requesting Collaboration: No Success"
                    _obj_cat_values[_relation] = {_obj_cat_id: _obj_cat_value}

        _rule = self.add_rule(_sub_cat_values, _obj_cat_values)
        if type(_rule) is not dict:
            return "[Error] Create Requesting Collaboration: No Success"
        return {"subject_category_value_dict": _sub_cat_values, "object_category_value_dict": _obj_cat_values,
                "rule": _rule}

    def destroy_requesting_collaboration(self, sub_list, vent_uuid, sub_cat_value_dict, obj_cat_value_dict):
        """Tear down everything create_requesting_collaboration() set up."""
        for _relation in self.get_meta_rule()["sub_meta_rules"]:
            for _sub_cat_id in self.get_meta_rule()["sub_meta_rules"][_relation]["subject_categories"]:
                for _sub in sub_list:
                    if type(self.del_subject_assignment(_sub_cat_id, _sub, sub_cat_value_dict[_relation][_sub_cat_id]))\
                            is not dict:
                        return "[Error] Destroy Requesting Collaboration: No Success"
                if type(self.del_subject_category_value(_sub_cat_id, sub_cat_value_dict[_relation][_sub_cat_id])) \
                        is not list:
                    return "[Error] Destroy Requesting Collaboration: No Success"

            for _obj_cat_id in self.get_meta_rule()["sub_meta_rules"][_relation]["object_categories"]:
                if _obj_cat_id == "action":
                    pass  # TODO: reconsidering the action as object attribute
                else:
                    if type(self.del_object_assignment(_obj_cat_id, vent_uuid, obj_cat_value_dict[_relation][_obj_cat_id])) is not dict:
                        return "[Error] Destroy Requesting Collaboration: No Success"
                    if type(self.del_object_category_value(_obj_cat_id, obj_cat_value_dict[_relation][_obj_cat_id])) is not list:
                        return "[Error] Destroy Requesting Collaboration: No Success"

        if type(self.del_rule(sub_cat_value_dict, obj_cat_value_dict)) is not dict:
            return "[Error] Destroy Requesting Collaboration: No Success"
        if type(self.del_object(vent_uuid)) is not list:
            return "[Error] Destroy Requesting Collaboration: No Success"
        return "[Destroy Requesting Collaboration] OK"

    def create_requested_collaboration(self, vent_uuid, obj_list, act):
        """Grant a virtual entity access to local objects (requested side).

        Mirror image of create_requesting_collaboration(): the virtual
        entity becomes the subject, local objects receive the new values.
        NOTE(review): shares the same per-category overwrite and 'action'
        KeyError caveats as the requesting variant.
        """
        _sub_cat_values = dict()
        _obj_cat_values = dict()

        if type(self.add_subject(vent_uuid)) is not list:
            return "[Error] Create Requested Collaboration: No Success"

        for _relation in self.get_meta_rule()["sub_meta_rules"]:
            for _sub_cat_id in self.get_meta_rule()["sub_meta_rules"][_relation]["subject_categories"]:
                _sub_cat_value = str(uuid4())
                if type(self.add_subject_category_value(_sub_cat_id, _sub_cat_value)) is not list:
                    return "[Error] Create Requested Collaboration: No Success"
                _sub_cat_values[_relation] = {_sub_cat_id: _sub_cat_value}
                if type(self.add_subject_assignment(_sub_cat_id, vent_uuid, _sub_cat_value)) is not dict:
                    return "[Error] Create Requested Collaboration: No Success"

            for _obj_cat_id in self.get_meta_rule()["sub_meta_rules"][_relation]["object_categories"]:
                if _obj_cat_id == 'action':
                    _obj_cat_values[_relation][_obj_cat_id] = act
                else:
                    _obj_cat_value = str(uuid4())
                    if type(self.add_object_category_value(_obj_cat_id, _obj_cat_value)) is not list:
                        return "[Error] Create Requested Collaboration: No Success"
                    _obj_cat_values[_relation] = {_obj_cat_id: _obj_cat_value}
                    for _obj in obj_list:
                        if type(self.add_object_assignment(_obj_cat_id, _obj, _obj_cat_value)) is not dict:
                            return "[Error] Create Requested Collaboration: No Success"

        _rule = self.add_rule(_sub_cat_values, _obj_cat_values)
        if type(_rule) is not dict:
            return "[Error] Create Requested Collaboration: No Success"
        return {"subject_category_value_dict": _sub_cat_values, "object_category_value_dict": _obj_cat_values,
                "rule": _rule}

    def destroy_requested_collaboration(self, vent_uuid, obj_list, sub_cat_value_dict, obj_cat_value_dict):
        """Tear down everything create_requested_collaboration() set up."""
        for _relation in self.get_meta_rule()["sub_meta_rules"]:
            for _sub_cat_id in self.get_meta_rule()["sub_meta_rules"][_relation]["subject_categories"]:
                if type(self.del_subject_assignment(_sub_cat_id, vent_uuid, sub_cat_value_dict[_relation][_sub_cat_id])) is not dict:
                    return "[Error] Destroy Requested Collaboration: No Success"
                if type(self.del_subject_category_value(_sub_cat_id, sub_cat_value_dict[_relation][_sub_cat_id])) is not list:
                    return "[Error] Destroy Requested Collaboration: No Success"

            for _obj_cat_id in self.get_meta_rule()["sub_meta_rules"][_relation]["object_categories"]:
                if _obj_cat_id == "action":
                    pass  # TODO: reconsidering the action as object attribute
                else:
                    for _obj in obj_list:
                        if type(self.del_object_assignment(_obj_cat_id, _obj, obj_cat_value_dict[_relation][_obj_cat_id])) is not dict:
                            return "[Error] Destroy Requested Collaboration: No Success"
                    if type(self.del_object_category_value(_obj_cat_id, obj_cat_value_dict[_relation][_obj_cat_id])) is not list:
                        return "[Error] Destroy Requested Collaboration: No Success"

        if type(self.del_rule(sub_cat_value_dict, obj_cat_value_dict)) is not dict:
            return "[Error] Destroy Requested Collaboration: No Success"
        if type(self.del_subject(vent_uuid)) is not list:
            return "[Error] Destroy Requested Collaboration: No Success"
        return "[Destroy Requested Collaboration] OK"

    # ---------------- sync_db api ----------------

    def get_data(self):
        """Serialize all four sub-components into one dict (for DB sync)."""
        data = dict()
        data["metadata"] = self.metadata.get_data()
        data["configuration"] = self.configuration.get_data()
        data["perimeter"] = self.perimeter.get_data()
        data["assignment"] = self.assignment.get_data()
        return data

    def set_data(self, extension_data):
        """Restore all four sub-components from a get_data()-shaped dict."""
        self.metadata.set_data(extension_data["metadata"])
        self.configuration.set_data(extension_data["configuration"])
        self.perimeter.set_data(extension_data["perimeter"])
        self.assignment.set_data(extension_data["assignment"])
diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/moon/migrate_repo/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/migrate_repo/__init__.py
diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/moon/migrate_repo/migrate.cfg
new file mode 100644
index 00000000..7a7bd1f8
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/migrate_repo/migrate.cfg
@@ -0,0 +1,25 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=moon
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to
+# change the table name in each database too.
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the
+# commit continues, perhaps ending successfully.
+# Databases in this list MUST compile successfully during a commit, or the
+# entire commit will fail. List the databases your application will actually
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
+
+# When creating new change scripts, Migrate will stamp the new script with
+# a version number. By default this is latest_version + 1. You can set this
+# to 'true' to tell Migrate to use the UTC timestamp instead.
+use_timestamp_numbering=False
diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/001_moon.py b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/001_moon.py
new file mode 100644
index 00000000..a49ca206
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/001_moon.py
@@ -0,0 +1,194 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import sqlalchemy as sql
+from keystone.common import sql as k_sql
+
+
def upgrade(migrate_engine):
    """Create the Moon intra-extension schema.

    Creates the ``intra_extension`` parent table, thirteen satellite tables
    that all share the same shape (string-id primary key, one JSON-blob
    payload column, and a foreign key back to ``intra_extension.id``), and
    the ``metarule`` table which additionally carries an ``aggregation``
    column. ``checkfirst=True`` makes the migration idempotent.

    :param migrate_engine: SQLAlchemy engine bound to the target database.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    intra_extension_table = sql.Table(
        'intra_extension',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('name', sql.String(64), nullable=False),
        sql.Column('model', sql.String(64), nullable=True),
        sql.Column('description', sql.Text(), nullable=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    intra_extension_table.create(migrate_engine, checkfirst=True)

    def create_json_table(table_name, json_column):
        # Helper for the common satellite-table shape: id PK + one JSON blob
        # + FK to intra_extension. Factoring it out avoids ~130 duplicated
        # lines and keeps every table definition identical by construction.
        table = sql.Table(
            table_name,
            meta,
            sql.Column('id', sql.String(64), primary_key=True),
            sql.Column(json_column, k_sql.JsonBlob(), nullable=True),
            sql.Column('intra_extension_uuid', sql.ForeignKey("intra_extension.id"), nullable=False),
            mysql_engine='InnoDB',
            mysql_charset='utf8')
        table.create(migrate_engine, checkfirst=True)

    # Perimeter tables.
    create_json_table('subject', 'subjects')
    create_json_table('object', 'objects')
    create_json_table('action', 'actions')

    # Category tables.
    create_json_table('subject_category', 'subject_categories')
    create_json_table('object_category', 'object_categories')
    create_json_table('action_category', 'action_categories')

    # Category scope (allowed values) tables.
    create_json_table('subject_category_scope', 'subject_category_scope')
    create_json_table('object_category_scope', 'object_category_scope')
    create_json_table('action_category_scope', 'action_category_scope')

    # Assignment tables.
    create_json_table('subject_category_assignment', 'subject_category_assignments')
    create_json_table('object_category_assignment', 'object_category_assignments')
    create_json_table('action_category_assignment', 'action_category_assignments')

    # metarule has an extra 'aggregation' column, so it stays explicit.
    meta_rule_table = sql.Table(
        'metarule',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('sub_meta_rules', k_sql.JsonBlob(), nullable=True),
        sql.Column('aggregation', sql.Text(), nullable=True),
        sql.Column('intra_extension_uuid', sql.ForeignKey("intra_extension.id"), nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    meta_rule_table.create(migrate_engine, checkfirst=True)

    create_json_table('rule', 'rules')
+
+
def downgrade(migrate_engine):
    """Drop every table created by upgrade().

    Children are dropped before 'intra_extension' so foreign-key
    constraints are not violated. Drop failures are reported but do not
    abort the migration (best-effort cleanup, matching the original
    intent).

    :param migrate_engine: SQLAlchemy engine bound to the target database.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    for _table in (
            'subject',
            'object',
            'action',
            'subject_category',
            'object_category',
            'action_category',
            'subject_category_scope',
            'object_category_scope',
            'action_category_scope',
            'subject_category_assignment',
            'object_category_assignment',
            'action_category_assignment',
            'metarule',
            'rule',
            'intra_extension',
    ):
        try:
            table = sql.Table(_table, meta, autoload=True)
            table.drop(migrate_engine, checkfirst=True)
        except Exception as e:
            # BUG FIX: exceptions have no .message attribute on Python 3
            # (and it was deprecated since 2.6) -- printing e.message would
            # itself raise AttributeError inside the error handler.
            print(e)
+
+
diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/002_moon.py b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/002_moon.py
new file mode 100644
index 00000000..a0f9095f
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/002_moon.py
@@ -0,0 +1,34 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import sqlalchemy as sql
+from keystone.common import sql as k_sql
+
+
def upgrade(migrate_engine):
    """Create the 'inter_extension' table linking two intra extensions."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    inter_extension_table = sql.Table(
        'inter_extension',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('requesting_intra_extension_uuid', sql.String(64), nullable=False),
        sql.Column('requested_intra_extension_uuid', sql.String(64), nullable=False),
        sql.Column('virtual_entity_uuid', sql.String(64), nullable=False),
        sql.Column('genre', sql.String(64), nullable=False),
        sql.Column('description', sql.Text(), nullable=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    inter_extension_table.create(migrate_engine, checkfirst=True)
+
+
def downgrade(migrate_engine):
    """Remove the 'inter_extension' table created by upgrade()."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    inter_extension_table = sql.Table('inter_extension', meta, autoload=True)
    inter_extension_table.drop(migrate_engine, checkfirst=True)
diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/003_moon.py b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/003_moon.py
new file mode 100644
index 00000000..06932754
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/003_moon.py
@@ -0,0 +1,32 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+import sqlalchemy as sql
+from keystone.common import sql as k_sql
+
+
def upgrade(migrate_engine):
    """Create the 'tenants' mapping table (tenant -> authz/admin extensions)."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    tenants_table = sql.Table(
        'tenants',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('name', sql.String(128), nullable=True),
        sql.Column('authz', sql.String(64), nullable=True),
        sql.Column('admin', sql.String(64), nullable=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    tenants_table.create(migrate_engine, checkfirst=True)
+
+
def downgrade(migrate_engine):
    """Remove the 'tenants' table created by upgrade()."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    tenants_table = sql.Table('tenants', meta, autoload=True)
    tenants_table.drop(migrate_engine, checkfirst=True)
diff --git a/keystone-moon/keystone/contrib/moon/routers.py b/keystone-moon/keystone/contrib/moon/routers.py
new file mode 100644
index 00000000..e1eb1130
--- /dev/null
+++ b/keystone-moon/keystone/contrib/moon/routers.py
@@ -0,0 +1,443 @@
+# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
+# This software is distributed under the terms and conditions of the 'Apache-2.0'
+# license which can be found in the file 'LICENSE' in this package distribution
+# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
+
+"""WSGI Routers for the Moon service."""
+
+from keystone.contrib.moon import controllers
+from keystone.common import wsgi
+
+
class Routers(wsgi.RoutersBase):
    """API Endpoints for the Moon extension.

    Registers every /OS-MOON route: the authorization check endpoint,
    intra-extension management (perimeter, metadata, scopes, assignments,
    meta-rules and sub-rules), tenant mapping and log access.
    """

    PATH_PREFIX = '/OS-MOON'

    @staticmethod
    def _get_rel(component):
        """Return the JSON-Home relationship URL for *component*.

        NOTE(review): this currently returns the same '/param/' URL as
        _get_path -- it looks like a copy/paste ('/rel/' would be the usual
        convention). Kept as-is to preserve the published relation strings;
        confirm before changing.
        """
        return 'http://docs.openstack.org/api/openstack-authz/3/param/{}'.format(component)

    @staticmethod
    def _get_path(component):
        """Return the JSON-Home path-variable documentation URL for *component*."""
        return 'http://docs.openstack.org/api/openstack-authz/3/param/{}'.format(component)

    def _add_intra_ext_resource(self, mapper, controller, suffix, rel,
                                **actions):
        """Register one route scoped under a single intra extension.

        All of these routes share the
        '/intra_extensions/{intra_extensions_id}' prefix and the same
        path_vars mapping; *actions* forwards the get_action / post_action /
        delete_action keywords straight to _add_resource.
        """
        self._add_resource(
            mapper, controller,
            path=(self.PATH_PREFIX + '/intra_extensions/{intra_extensions_id}'
                  + suffix),
            rel=self._get_rel(rel),
            path_vars={
                'intra_extensions_id': self._get_path('intra_extensions'),
            },
            **actions)

    def append_v3_routers(self, mapper, routers):
        # Controllers creation
        authz_controller = controllers.Authz_v3()
        intra_ext_controller = controllers.IntraExtensions()
        authz_policies_controller = controllers.AuthzPolicies()
        tenants_controller = controllers.Tenants()
        logs_controller = controllers.Logs()
        inter_ext_controller = controllers.InterExtensions()

        # Authz route
        self._add_resource(
            mapper, authz_controller,
            path=self.PATH_PREFIX+'/authz/{tenant_id}/{subject_id}/{object_id}/{action_id}',
            get_action='get_authz',
            rel=self._get_rel('authz'),
            path_vars={
                'tenant_id': self._get_path('tenants'),
                'subject_id': self._get_path('subjects'),
                'object_id': self._get_path('objects'),
                'action_id': self._get_path('actions'),
            })

        # IntraExtensions route
        self._add_resource(
            mapper, intra_ext_controller,
            path=self.PATH_PREFIX+'/intra_extensions',
            get_action='get_intra_extensions',
            post_action='create_intra_extension',
            rel=self._get_rel('intra_extensions'),
            path_vars={})
        self._add_intra_ext_resource(
            mapper, intra_ext_controller, '', 'intra_extensions',
            get_action='get_intra_extension',
            delete_action='delete_intra_extension')

        self._add_resource(
            mapper, authz_policies_controller,
            path=self.PATH_PREFIX+'/authz_policies',
            get_action='get_authz_policies',
            rel=self._get_rel('authz_policies'),
            path_vars={})

        # Perimeter route
        self._add_intra_ext_resource(
            mapper, intra_ext_controller, '/subjects', 'subjects',
            get_action='get_subjects', post_action='add_subject')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller, '/subjects/{subject_id}', 'subjects',
            delete_action='del_subject')
        # BUG FIX: this route previously registered rel='subjects'
        # (copy/paste from the subjects route); it serves objects.
        self._add_intra_ext_resource(
            mapper, intra_ext_controller, '/objects', 'objects',
            get_action='get_objects', post_action='add_object')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller, '/objects/{object_id}', 'objects',
            delete_action='del_object')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller, '/actions', 'actions',
            get_action='get_actions', post_action='add_action')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller, '/actions/{action_id}', 'actions',
            delete_action='del_action')

        # Metadata route
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/subject_categories', 'subject_categories',
            get_action='get_subject_categories',
            post_action='add_subject_category')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/subject_categories/{subject_category_id}', 'subject_categories',
            delete_action='del_subject_category')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/object_categories', 'object_categories',
            get_action='get_object_categories',
            post_action='add_object_category')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/object_categories/{object_category_id}', 'object_categories',
            delete_action='del_object_category')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/action_categories', 'action_categories',
            get_action='get_action_categories',
            post_action='add_action_category')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/action_categories/{action_category_id}', 'action_categories',
            delete_action='del_action_category')

        # Scope route
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/subject_category_scope', 'subject_category_scope',
            post_action='add_subject_category_scope')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/subject_category_scope/{subject_category_id}',
            'subject_category_scope',
            get_action='get_subject_category_scope')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/subject_category_scope/{subject_category_id}/{subject_category_scope_id}',
            'subject_category_scope',
            delete_action='del_subject_category_scope')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/object_category_scope', 'object_category_scope',
            post_action='add_object_category_scope')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/object_category_scope/{object_category_id}',
            'object_category_scope',
            get_action='get_object_category_scope')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/object_category_scope/{object_category_id}/{object_category_scope_id}',
            'object_category_scope',
            delete_action='del_object_category_scope')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/action_category_scope', 'action_category_scope',
            post_action='add_action_category_scope')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/action_category_scope/{action_category_id}',
            'action_category_scope',
            get_action='get_action_category_scope')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/action_category_scope/{action_category_id}/{action_category_scope_id}',
            'action_category_scope',
            delete_action='del_action_category_scope')

        # Assignment route
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/subject_assignments/{subject_id}', 'subject_assignments',
            get_action='get_subject_assignments')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/subject_assignments', 'subject_assignments',
            post_action='add_subject_assignment')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/subject_assignments/{subject_id}/{subject_category}/{subject_category_scope}',
            'subject_assignments',
            delete_action='del_subject_assignment')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/object_assignments/{object_id}', 'object_assignments',
            get_action='get_object_assignments')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/object_assignments', 'object_assignments',
            post_action='add_object_assignment')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/object_assignments/{object_id}/{object_category}/{object_category_scope}',
            'object_assignments',
            delete_action='del_object_assignment')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/action_assignments/{action_id}', 'action_assignments',
            get_action='get_action_assignments')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/action_assignments', 'action_assignments',
            post_action='add_action_assignment')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/action_assignments/{action_id}/{action_category}/{action_category_scope}',
            'action_assignments',
            delete_action='del_action_assignment')

        # Metarule route
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/aggregation_algorithms', 'aggregation_algorithms',
            get_action='get_aggregation_algorithms')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/aggregation_algorithm', 'aggregation_algorithms',
            get_action='get_aggregation_algorithm',
            post_action='set_aggregation_algorithm')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/sub_meta_rule', 'sub_meta_rule',
            get_action='get_sub_meta_rule',
            post_action='set_sub_meta_rule')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/sub_meta_rule_relations', 'sub_meta_rule_relations',
            get_action='get_sub_meta_rule_relations')

        # Rules route
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/sub_rules', 'sub_rules',
            get_action='get_sub_rules',
            post_action='set_sub_rule')
        self._add_intra_ext_resource(
            mapper, intra_ext_controller,
            '/sub_rules/{relation_name}/{rule}', 'sub_rules',
            delete_action='del_sub_rule')

        # Tenants route
        self._add_resource(
            mapper, tenants_controller,
            path=self.PATH_PREFIX+'/tenants',
            get_action='get_tenants',
            rel=self._get_rel('tenants'),
            path_vars={})
        self._add_resource(
            mapper, tenants_controller,
            path=self.PATH_PREFIX+'/tenant',
            post_action='set_tenant',
            rel=self._get_rel('tenants'),
            path_vars={})
        self._add_resource(
            mapper, tenants_controller,
            path=self.PATH_PREFIX+'/tenant/{tenant_uuid}',
            get_action='get_tenant',
            delete_action='delete_tenant',
            rel=self._get_rel('tenants'),
            path_vars={
                'tenant_uuid': self._get_path('tenants'),
            })

        # Logs route
        self._add_resource(
            mapper, logs_controller,
            path=self.PATH_PREFIX+'/logs',
            get_action='get_logs',
            rel=self._get_rel('logs'),
            path_vars={
            })
        self._add_resource(
            mapper, logs_controller,
            path=self.PATH_PREFIX+'/logs/{options}',
            get_action='get_logs',
            rel=self._get_rel('logs'),
            path_vars={
            })

        # InterExtensions route
        # self._add_resource(
        #     mapper, inter_ext_controller,
        #     path=self.PATH_PREFIX+'/inter_extensions',
        #     get_action='get_inter_extensions',
        #     post_action='create_inter_extension',
        #     rel=self._get_rel('inter_extensions'),
        #     path_vars={})
        # self._add_resource(
        #     mapper, inter_ext_controller,
        #     path=self.PATH_PREFIX+'/inter_extensions/{inter_extensions_id}',
        #     get_action='get_inter_extension',
        #     delete_action='delete_inter_extension',
        #     rel=self._get_rel('inter_extensions'),
        #     path_vars={
        #         'inter_extensions_id': self._get_path('inter_extensions'),
        #     })
diff --git a/keystone-moon/keystone/contrib/oauth1/__init__.py b/keystone-moon/keystone/contrib/oauth1/__init__.py
new file mode 100644
index 00000000..8cab2498
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.oauth1.core import * # noqa
diff --git a/keystone-moon/keystone/contrib/oauth1/backends/__init__.py b/keystone-moon/keystone/contrib/oauth1/backends/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/backends/__init__.py
diff --git a/keystone-moon/keystone/contrib/oauth1/backends/sql.py b/keystone-moon/keystone/contrib/oauth1/backends/sql.py
new file mode 100644
index 00000000..c6ab6e5a
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/backends/sql.py
@@ -0,0 +1,272 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import random as _random
+import uuid
+
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+import six
+
+from keystone.common import sql
+from keystone.contrib.oauth1 import core
+from keystone import exception
+from keystone.i18n import _
+
+
+random = _random.SystemRandom()
+
+
class Consumer(sql.ModelBase, sql.DictBase):
    """SQL model for an OAuth1 consumer (the registered application).

    The 'secret' column holds the consumer secret generated server-side in
    OAuth1.create_consumer; 'extra' stores any additional attributes as a
    JSON blob.
    """
    __tablename__ = 'consumer'
    attributes = ['id', 'description', 'secret']
    id = sql.Column(sql.String(64), primary_key=True, nullable=False)
    description = sql.Column(sql.String(64), nullable=True)
    secret = sql.Column(sql.String(64), nullable=False)
    extra = sql.Column(sql.JsonBlob(), nullable=False)
+
+
class RequestToken(sql.ModelBase, sql.DictBase):
    """SQL model for an OAuth1 request token (first leg of the flow)."""
    __tablename__ = 'request_token'
    attributes = ['id', 'request_secret',
                  'verifier', 'authorizing_user_id', 'requested_project_id',
                  'role_ids', 'consumer_id', 'expires_at']
    id = sql.Column(sql.String(64), primary_key=True, nullable=False)
    request_secret = sql.Column(sql.String(64), nullable=False)
    # verifier, authorizing_user_id and role_ids remain NULL until the
    # token is authorized (OAuth1.authorize_request_token fills them in).
    verifier = sql.Column(sql.String(64), nullable=True)
    authorizing_user_id = sql.Column(sql.String(64), nullable=True)
    requested_project_id = sql.Column(sql.String(64), nullable=False)
    role_ids = sql.Column(sql.Text(), nullable=True)
    consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'),
                             nullable=False, index=True)
    # expires_at is stored as an ISO8601 string, not a DateTime column.
    expires_at = sql.Column(sql.String(64), nullable=True)

    @classmethod
    def from_dict(cls, user_dict):
        """Build a model instance from a plain dict of column values."""
        return cls(**user_dict)

    def to_dict(self):
        """Return the row as a plain dict (DictBase rows are iterable)."""
        return dict(six.iteritems(self))
+
+
class AccessToken(sql.ModelBase, sql.DictBase):
    """SQL model for an OAuth1 access token (exchanged for a request token)."""
    __tablename__ = 'access_token'
    attributes = ['id', 'access_secret', 'authorizing_user_id',
                  'project_id', 'role_ids', 'consumer_id',
                  'expires_at']
    id = sql.Column(sql.String(64), primary_key=True, nullable=False)
    access_secret = sql.Column(sql.String(64), nullable=False)
    # Indexed: list_access_tokens filters on the authorizing user.
    authorizing_user_id = sql.Column(sql.String(64), nullable=False,
                                     index=True)
    project_id = sql.Column(sql.String(64), nullable=False)
    # JSON-encoded list of role IDs copied from the authorized request token.
    role_ids = sql.Column(sql.Text(), nullable=False)
    consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'),
                             nullable=False)
    # expires_at is stored as an ISO8601 string, not a DateTime column.
    expires_at = sql.Column(sql.String(64), nullable=True)

    @classmethod
    def from_dict(cls, user_dict):
        """Build a model instance from a plain dict of column values."""
        return cls(**user_dict)

    def to_dict(self):
        """Return the row as a plain dict (DictBase rows are iterable)."""
        return dict(six.iteritems(self))
+
+
class OAuth1(object):
    """SQL backend driver for the OAuth1 extension.

    Implements consumer CRUD plus the request-token / access-token
    lifecycle on top of the Consumer, RequestToken and AccessToken models
    above. All writes happen inside ``with session.begin()`` transactions.
    """

    def _get_consumer(self, session, consumer_id):
        """Return the Consumer row or raise NotFound."""
        consumer_ref = session.query(Consumer).get(consumer_id)
        if consumer_ref is None:
            raise exception.NotFound(_('Consumer not found'))
        return consumer_ref

    def get_consumer_with_secret(self, consumer_id):
        """Return the consumer dict including its 'secret' field."""
        session = sql.get_session()
        consumer_ref = self._get_consumer(session, consumer_id)
        return consumer_ref.to_dict()

    def get_consumer(self, consumer_id):
        """Return the consumer dict with the secret filtered out."""
        return core.filter_consumer(
            self.get_consumer_with_secret(consumer_id))

    def create_consumer(self, consumer):
        """Store a new consumer, generating its secret server-side.

        NOTE: mutates the *consumer* dict passed in (sets 'secret' and
        possibly 'description').
        """
        consumer['secret'] = uuid.uuid4().hex
        if not consumer.get('description'):
            consumer['description'] = None
        session = sql.get_session()
        with session.begin():
            consumer_ref = Consumer.from_dict(consumer)
            session.add(consumer_ref)
        return consumer_ref.to_dict()

    def _delete_consumer(self, session, consumer_id):
        """Delete one consumer row (caller owns the transaction)."""
        consumer_ref = self._get_consumer(session, consumer_id)
        session.delete(consumer_ref)

    def _delete_request_tokens(self, session, consumer_id):
        """Delete every request token issued to *consumer_id*."""
        q = session.query(RequestToken)
        req_tokens = q.filter_by(consumer_id=consumer_id)
        req_tokens_list = set([x.id for x in req_tokens])
        for token_id in req_tokens_list:
            token_ref = self._get_request_token(session, token_id)
            session.delete(token_ref)

    def _delete_access_tokens(self, session, consumer_id):
        """Delete every access token issued to *consumer_id*."""
        q = session.query(AccessToken)
        acc_tokens = q.filter_by(consumer_id=consumer_id)
        acc_tokens_list = set([x.id for x in acc_tokens])
        for token_id in acc_tokens_list:
            token_ref = self._get_access_token(session, token_id)
            session.delete(token_ref)

    def delete_consumer(self, consumer_id):
        """Delete a consumer and all of its tokens in one transaction.

        Tokens are removed first so the foreign keys on consumer.id are
        cleared before the consumer row itself is deleted.
        """
        session = sql.get_session()
        with session.begin():
            self._delete_request_tokens(session, consumer_id)
            self._delete_access_tokens(session, consumer_id)
            self._delete_consumer(session, consumer_id)

    def list_consumers(self):
        """Return all consumers, with secrets filtered out."""
        session = sql.get_session()
        cons = session.query(Consumer)
        return [core.filter_consumer(x.to_dict()) for x in cons]

    def update_consumer(self, consumer_id, consumer):
        """Update a consumer's description/extra; the secret is unchanged."""
        session = sql.get_session()
        with session.begin():
            consumer_ref = self._get_consumer(session, consumer_id)
            old_consumer_dict = consumer_ref.to_dict()
            old_consumer_dict.update(consumer)
            new_consumer = Consumer.from_dict(old_consumer_dict)
            # Only description and extra are copied back; id and secret
            # cannot be changed through update.
            consumer_ref.description = new_consumer.description
            consumer_ref.extra = new_consumer.extra
        return core.filter_consumer(consumer_ref.to_dict())

    def create_request_token(self, consumer_id, project_id, token_duration,
                             request_token_id=None, request_token_secret=None):
        """Create an unauthorized request token for *consumer_id*.

        :param token_duration: lifetime in seconds; falsy means no expiry.
        :param request_token_id: optional pre-computed token id (otherwise
            a uuid4 hex is generated); same for *request_token_secret*.
        """
        if request_token_id is None:
            request_token_id = uuid.uuid4().hex
        if request_token_secret is None:
            request_token_secret = uuid.uuid4().hex
        expiry_date = None
        if token_duration:
            now = timeutils.utcnow()
            future = now + datetime.timedelta(seconds=token_duration)
            expiry_date = timeutils.isotime(future, subsecond=True)

        ref = {}
        ref['id'] = request_token_id
        ref['request_secret'] = request_token_secret
        # verifier/authorizing_user_id/role_ids are filled in later by
        # authorize_request_token.
        ref['verifier'] = None
        ref['authorizing_user_id'] = None
        ref['requested_project_id'] = project_id
        ref['role_ids'] = None
        ref['consumer_id'] = consumer_id
        ref['expires_at'] = expiry_date
        session = sql.get_session()
        with session.begin():
            token_ref = RequestToken.from_dict(ref)
            session.add(token_ref)
        return token_ref.to_dict()

    def _get_request_token(self, session, request_token_id):
        """Return the RequestToken row or raise NotFound."""
        token_ref = session.query(RequestToken).get(request_token_id)
        if token_ref is None:
            raise exception.NotFound(_('Request token not found'))
        return token_ref

    def get_request_token(self, request_token_id):
        """Return the request token as a dict."""
        session = sql.get_session()
        token_ref = self._get_request_token(session, request_token_id)
        return token_ref.to_dict()

    def authorize_request_token(self, request_token_id, user_id,
                                role_ids):
        """Mark a request token as authorized by *user_id*.

        Generates the 8-character OAuth verifier (from a cryptographically
        secure RNG) and records the delegated role ids as JSON.
        """
        session = sql.get_session()
        with session.begin():
            token_ref = self._get_request_token(session, request_token_id)
            token_dict = token_ref.to_dict()
            token_dict['authorizing_user_id'] = user_id
            token_dict['verifier'] = ''.join(random.sample(core.VERIFIER_CHARS,
                                                           8))
            token_dict['role_ids'] = jsonutils.dumps(role_ids)

            # Copy only the authorization-related attributes back onto the
            # persistent row.
            new_token = RequestToken.from_dict(token_dict)
            for attr in RequestToken.attributes:
                if (attr == 'authorizing_user_id' or attr == 'verifier'
                        or attr == 'role_ids'):
                    setattr(token_ref, attr, getattr(new_token, attr))

        return token_ref.to_dict()

    def create_access_token(self, request_token_id, token_duration,
                            access_token_id=None, access_token_secret=None):
        """Exchange an authorized request token for an access token.

        The request token is consumed (deleted) in the same transaction
        that creates the access token.
        """
        if access_token_id is None:
            access_token_id = uuid.uuid4().hex
        if access_token_secret is None:
            access_token_secret = uuid.uuid4().hex
        session = sql.get_session()
        with session.begin():
            req_token_ref = self._get_request_token(session, request_token_id)
            token_dict = req_token_ref.to_dict()

            expiry_date = None
            if token_duration:
                now = timeutils.utcnow()
                future = now + datetime.timedelta(seconds=token_duration)
                expiry_date = timeutils.isotime(future, subsecond=True)

            # add Access Token
            ref = {}
            ref['id'] = access_token_id
            ref['access_secret'] = access_token_secret
            ref['authorizing_user_id'] = token_dict['authorizing_user_id']
            ref['project_id'] = token_dict['requested_project_id']
            ref['role_ids'] = token_dict['role_ids']
            ref['consumer_id'] = token_dict['consumer_id']
            ref['expires_at'] = expiry_date
            token_ref = AccessToken.from_dict(ref)
            session.add(token_ref)

            # remove request token, it's been used
            session.delete(req_token_ref)

        return token_ref.to_dict()

    def _get_access_token(self, session, access_token_id):
        """Return the AccessToken row or raise NotFound."""
        token_ref = session.query(AccessToken).get(access_token_id)
        if token_ref is None:
            raise exception.NotFound(_('Access token not found'))
        return token_ref

    def get_access_token(self, access_token_id):
        """Return the access token as a dict."""
        session = sql.get_session()
        token_ref = self._get_access_token(session, access_token_id)
        return token_ref.to_dict()

    def list_access_tokens(self, user_id):
        """Return the filtered access tokens authorized by *user_id*."""
        session = sql.get_session()
        q = session.query(AccessToken)
        user_auths = q.filter_by(authorizing_user_id=user_id)
        return [core.filter_token(x.to_dict()) for x in user_auths]

    def delete_access_token(self, user_id, access_token_id):
        """Delete an access token, verifying it belongs to *user_id*."""
        session = sql.get_session()
        with session.begin():
            token_ref = self._get_access_token(session, access_token_id)
            token_dict = token_ref.to_dict()
            # Only the user who authorized the token may revoke it.
            if token_dict['authorizing_user_id'] != user_id:
                raise exception.Unauthorized(_('User IDs do not match'))

            session.delete(token_ref)
diff --git a/keystone-moon/keystone/contrib/oauth1/controllers.py b/keystone-moon/keystone/contrib/oauth1/controllers.py
new file mode 100644
index 00000000..fb5d0bc2
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/controllers.py
@@ -0,0 +1,417 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Extensions supporting OAuth1."""
+
+from oslo_config import cfg
+from oslo_serialization import jsonutils
+from oslo_utils import timeutils
+
+from keystone.common import controller
+from keystone.common import dependency
+from keystone.common import wsgi
+from keystone.contrib.oauth1 import core as oauth1
+from keystone.contrib.oauth1 import validator
+from keystone import exception
+from keystone.i18n import _
+from keystone.models import token_model
+from keystone import notifications
+
+
CONF = cfg.CONF


# The decorator registers this callable as an internal-notification emitter:
# calling it publishes an INVALIDATE_USER_OAUTH_CONSUMER_TOKENS event whose
# resource id is the first positional argument (the payload dict), so the
# function body itself has nothing to do.
@notifications.internal(notifications.INVALIDATE_USER_OAUTH_CONSUMER_TOKENS,
                        resource_id_arg_index=0)
def _emit_user_oauth_consumer_token_invalidate(payload):
    # This is a special case notification that expects the payload to be a
    # dict containing the user_id and the consumer_id. This is so that the
    # token provider can invalidate any tokens in the token persistence if
    # token persistence is enabled.
    pass
+
+
@dependency.requires('oauth_api', 'token_provider_api')
class ConsumerCrudV3(controller.V3Controller):
    """CRUD controller for OAuth1 consumers (/OS-OAUTH1/consumers)."""

    collection_name = 'consumers'
    member_name = 'consumer'

    @classmethod
    def base_url(cls, context, path=None):
        """Construct a path and pass it to V3Controller.base_url method."""

        # NOTE(stevemar): Overriding path to /OS-OAUTH1/consumers so that
        # V3Controller.base_url handles setting the self link correctly.
        path = '/OS-OAUTH1/' + cls.collection_name
        return controller.V3Controller.base_url(context, path=path)

    @controller.protected()
    def create_consumer(self, context, consumer):
        """Create a consumer ref with a freshly assigned unique id."""
        ref = self._assign_unique_id(self._normalize_dict(consumer))
        initiator = notifications._get_request_audit_info(context)
        consumer_ref = self.oauth_api.create_consumer(ref, initiator)
        return ConsumerCrudV3.wrap_member(context, consumer_ref)

    @controller.protected()
    def update_consumer(self, context, consumer_id, consumer):
        """Update a consumer; the secret may never be changed this way."""
        self._require_matching_id(consumer_id, consumer)
        ref = self._normalize_dict(consumer)
        self._validate_consumer_ref(ref)
        initiator = notifications._get_request_audit_info(context)
        ref = self.oauth_api.update_consumer(consumer_id, ref, initiator)
        return ConsumerCrudV3.wrap_member(context, ref)

    @controller.protected()
    def list_consumers(self, context):
        """List all registered consumers."""
        ref = self.oauth_api.list_consumers()
        return ConsumerCrudV3.wrap_collection(context, ref)

    @controller.protected()
    def get_consumer(self, context, consumer_id):
        """Fetch a single consumer by id."""
        ref = self.oauth_api.get_consumer(consumer_id)
        return ConsumerCrudV3.wrap_member(context, ref)

    @controller.protected()
    def delete_consumer(self, context, consumer_id):
        """Delete a consumer and invalidate tokens it was issued through."""
        # Resolve the caller's token to learn which user's delegated tokens
        # must be invalidated alongside this consumer.
        user_token_ref = token_model.KeystoneToken(
            token_id=context['token_id'],
            token_data=self.token_provider_api.validate_token(
                context['token_id']))
        payload = {'user_id': user_token_ref.user_id,
                   'consumer_id': consumer_id}
        _emit_user_oauth_consumer_token_invalidate(payload)
        initiator = notifications._get_request_audit_info(context)
        self.oauth_api.delete_consumer(consumer_id, initiator)

    def _validate_consumer_ref(self, consumer):
        # Reject any update that tries to overwrite the consumer secret.
        if 'secret' in consumer:
            msg = _('Cannot change consumer secret')
            raise exception.ValidationError(message=msg)
+
+
@dependency.requires('oauth_api')
class AccessTokenCrudV3(controller.V3Controller):
    """Read/delete controller for a user's OAuth1 access tokens."""

    collection_name = 'access_tokens'
    member_name = 'access_token'

    @classmethod
    def _add_self_referential_link(cls, context, ref):
        # NOTE(lwolf): overriding method to add proper path to self link
        ref.setdefault('links', {})
        path = '/users/%(user_id)s/OS-OAUTH1/access_tokens' % {
            'user_id': cls._get_user_id(ref)
        }
        ref['links']['self'] = cls.base_url(context, path) + '/' + ref['id']

    @controller.protected()
    def get_access_token(self, context, user_id, access_token_id):
        """Return one access token; 404 unless it belongs to *user_id*."""
        access_token = self.oauth_api.get_access_token(access_token_id)
        if access_token['authorizing_user_id'] != user_id:
            raise exception.NotFound()
        access_token = self._format_token_entity(context, access_token)
        return AccessTokenCrudV3.wrap_member(context, access_token)

    @controller.protected()
    def list_access_tokens(self, context, user_id):
        """List the access tokens *user_id* has authorized."""
        auth_context = context.get('environment',
                                   {}).get('KEYSTONE_AUTH_CONTEXT', {})
        # Callers authenticated via a delegated token may not enumerate
        # other delegated tokens.
        if auth_context.get('is_delegated_auth'):
            raise exception.Forbidden(
                _('Cannot list request tokens'
                  ' with a token issued via delegation.'))
        refs = self.oauth_api.list_access_tokens(user_id)
        formatted_refs = ([self._format_token_entity(context, x)
                           for x in refs])
        return AccessTokenCrudV3.wrap_collection(context, formatted_refs)

    @controller.protected()
    def delete_access_token(self, context, user_id, access_token_id):
        """Delete an access token and invalidate tokens derived from it."""
        access_token = self.oauth_api.get_access_token(access_token_id)
        consumer_id = access_token['consumer_id']
        payload = {'user_id': user_id, 'consumer_id': consumer_id}
        _emit_user_oauth_consumer_token_invalidate(payload)
        initiator = notifications._get_request_audit_info(context)
        return self.oauth_api.delete_access_token(
            user_id, access_token_id, initiator)

    @staticmethod
    def _get_user_id(entity):
        # Access-token refs key the owning user as 'authorizing_user_id'.
        return entity.get('authorizing_user_id', '')

    def _format_token_entity(self, context, entity):
        """Strip secret data from a token ref and attach a 'roles' link."""
        formatted_entity = entity.copy()
        access_token_id = formatted_entity['id']
        user_id = self._get_user_id(formatted_entity)
        # Never expose the delegated role ids or the shared secret inline;
        # roles are reachable through the dedicated sub-resource link below.
        if 'role_ids' in entity:
            formatted_entity.pop('role_ids')
        if 'access_secret' in entity:
            formatted_entity.pop('access_secret')

        url = ('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(access_token_id)s'
               '/roles' % {'user_id': user_id,
                           'access_token_id': access_token_id})

        formatted_entity.setdefault('links', {})
        formatted_entity['links']['roles'] = (self.base_url(context, url))

        return formatted_entity
+
+
@dependency.requires('oauth_api', 'role_api')
class AccessTokenRolesV3(controller.V3Controller):
    """Expose the roles delegated through an OAuth1 access token."""

    collection_name = 'roles'
    member_name = 'role'

    @controller.protected()
    def list_access_token_roles(self, context, user_id, access_token_id):
        """List every role authorized on the given access token."""
        token = self.oauth_api.get_access_token(access_token_id)
        if token['authorizing_user_id'] != user_id:
            raise exception.NotFound()
        delegated_ids = jsonutils.loads(token['role_ids'])
        refs = [self._format_role_entity(rid) for rid in delegated_ids]
        return AccessTokenRolesV3.wrap_collection(context, refs)

    @controller.protected()
    def get_access_token_role(self, context, user_id,
                              access_token_id, role_id):
        """Fetch one role delegated through the access token."""
        token = self.oauth_api.get_access_token(access_token_id)
        if token['authorizing_user_id'] != user_id:
            raise exception.Unauthorized(_('User IDs do not match'))
        delegated_ids = jsonutils.loads(token['role_ids'])
        if role_id not in delegated_ids:
            raise exception.RoleNotFound(_('Could not find role'))
        role = self._format_role_entity(role_id)
        return AccessTokenRolesV3.wrap_member(context, role)

    def _format_role_entity(self, role_id):
        """Return the role ref without its description/enabled fields."""
        role = self.role_api.get_role(role_id)
        return {key: value for key, value in role.items()
                if key not in ('description', 'enabled')}
+
+
@dependency.requires('assignment_api', 'oauth_api',
                     'resource_api', 'token_provider_api')
class OAuthControllerV3(controller.V3Controller):
    """Implements the three-legged OAuth1 flow endpoints.

    Handles request-token creation, user authorization of a request token,
    and the exchange of an authorized request token for an access token.
    """

    # These handlers render raw urlencoded OAuth responses rather than
    # standard keystone collections, so these names are placeholders.
    collection_name = 'not_used'
    member_name = 'not_used'

    def create_request_token(self, context):
        """Issue an unauthorized request token to a consumer.

        Expects a signed OAuth request carrying 'oauth_consumer_key' in the
        Authorization header and a 'Requested-Project-Id' header naming the
        project the consumer wants delegated access to. Returns the token
        key/secret (plus expiry when configured) urlencoded in the body.
        """
        headers = context['headers']
        oauth_headers = oauth1.get_oauth_headers(headers)
        consumer_id = oauth_headers.get('oauth_consumer_key')
        requested_project_id = headers.get('Requested-Project-Id')

        if not consumer_id:
            raise exception.ValidationError(
                attribute='oauth_consumer_key', target='request')
        if not requested_project_id:
            raise exception.ValidationError(
                attribute='requested_project_id', target='request')

        # NOTE(stevemar): Ensure consumer and requested project exist
        self.resource_api.get_project(requested_project_id)
        self.oauth_api.get_consumer(consumer_id)

        url = self.base_url(context, context['path'])

        req_headers = {'Requested-Project-Id': requested_project_id}
        req_headers.update(headers)
        # Delegate signature verification to oauthlib's endpoint machinery.
        request_verifier = oauth1.RequestTokenEndpoint(
            request_validator=validator.OAuthValidator(),
            token_generator=oauth1.token_generator)
        h, b, s = request_verifier.create_request_token_response(
            url,
            http_method='POST',
            body=context['query_string'],
            headers=req_headers)

        # An empty body or a >= 4xx status from oauthlib means the
        # signature did not verify.
        if (not b) or int(s) > 399:
            msg = _('Invalid signature')
            raise exception.Unauthorized(message=msg)

        request_token_duration = CONF.oauth1.request_token_duration
        initiator = notifications._get_request_audit_info(context)
        token_ref = self.oauth_api.create_request_token(consumer_id,
                                                        requested_project_id,
                                                        request_token_duration,
                                                        initiator)

        result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s'
                  % {'key': token_ref['id'],
                     'secret': token_ref['request_secret']})

        if CONF.oauth1.request_token_duration:
            expiry_bit = '&oauth_expires_at=%s' % token_ref['expires_at']
            result += expiry_bit

        # NOTE(review): 'application/x-www-urlformencoded' looks like a typo
        # for 'application/x-www-form-urlencoded' -- confirm with existing
        # clients before changing, since it is emitted on the wire.
        headers = [('Content-Type', 'application/x-www-urlformencoded')]
        response = wsgi.render_response(result,
                                        status=(201, 'Created'),
                                        headers=headers)

        return response

    def create_access_token(self, context):
        """Exchange an authorized request token for an access token.

        Verifies the OAuth signature, the token/consumer/verifier binding,
        expiry, and that a user has authorized the request token; on success
        the request token is consumed and an access token is returned
        urlencoded in the body.
        """
        headers = context['headers']
        oauth_headers = oauth1.get_oauth_headers(headers)
        consumer_id = oauth_headers.get('oauth_consumer_key')
        request_token_id = oauth_headers.get('oauth_token')
        oauth_verifier = oauth_headers.get('oauth_verifier')

        if not consumer_id:
            raise exception.ValidationError(
                attribute='oauth_consumer_key', target='request')
        if not request_token_id:
            raise exception.ValidationError(
                attribute='oauth_token', target='request')
        if not oauth_verifier:
            raise exception.ValidationError(
                attribute='oauth_verifier', target='request')

        req_token = self.oauth_api.get_request_token(
            request_token_id)

        # Reject the exchange if the request token has expired.
        expires_at = req_token['expires_at']
        if expires_at:
            now = timeutils.utcnow()
            expires = timeutils.normalize_time(
                timeutils.parse_isotime(expires_at))
            if now > expires:
                raise exception.Unauthorized(_('Request token is expired'))

        url = self.base_url(context, context['path'])

        # Delegate signature verification to oauthlib's endpoint machinery.
        access_verifier = oauth1.AccessTokenEndpoint(
            request_validator=validator.OAuthValidator(),
            token_generator=oauth1.token_generator)
        h, b, s = access_verifier.create_access_token_response(
            url,
            http_method='POST',
            body=context['query_string'],
            headers=headers)
        # The exchange request must carry only oauth_* parameters.
        params = oauth1.extract_non_oauth_params(b)
        if len(params) != 0:
            msg = _('There should not be any non-oauth parameters')
            raise exception.Unauthorized(message=msg)

        if req_token['consumer_id'] != consumer_id:
            msg = _('provided consumer key does not match stored consumer key')
            raise exception.Unauthorized(message=msg)

        if req_token['verifier'] != oauth_verifier:
            msg = _('provided verifier does not match stored verifier')
            raise exception.Unauthorized(message=msg)

        if req_token['id'] != request_token_id:
            msg = _('provided request key does not match stored request key')
            raise exception.Unauthorized(message=msg)

        # A request token gains an authorizing user id only once a user has
        # authorized it; without one the exchange must not proceed.
        if not req_token.get('authorizing_user_id'):
            msg = _('Request Token does not have an authorizing user id')
            raise exception.Unauthorized(message=msg)

        access_token_duration = CONF.oauth1.access_token_duration
        initiator = notifications._get_request_audit_info(context)
        token_ref = self.oauth_api.create_access_token(request_token_id,
                                                       access_token_duration,
                                                       initiator)

        result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s'
                  % {'key': token_ref['id'],
                     'secret': token_ref['access_secret']})

        if CONF.oauth1.access_token_duration:
            expiry_bit = '&oauth_expires_at=%s' % (token_ref['expires_at'])
            result += expiry_bit

        # NOTE(review): same suspicious MIME type as create_request_token --
        # confirm before changing.
        headers = [('Content-Type', 'application/x-www-urlformencoded')]
        response = wsgi.render_response(result,
                                        status=(201, 'Created'),
                                        headers=headers)

        return response

    @controller.protected()
    def authorize_request_token(self, context, request_token_id, roles):
        """An authenticated user is going to authorize a request token.

        As a security precaution, the requested roles must match those in
        the request token. Because this is in a CLI-only world at the moment,
        there is not another easy way to make sure the user knows which roles
        are being requested before authorizing.
        """
        auth_context = context.get('environment',
                                   {}).get('KEYSTONE_AUTH_CONTEXT', {})
        # A delegated token (trust/oauth) may not be used to delegate again.
        if auth_context.get('is_delegated_auth'):
            raise exception.Forbidden(
                _('Cannot authorize a request token'
                  ' with a token issued via delegation.'))

        req_token = self.oauth_api.get_request_token(request_token_id)

        # Reject authorization if the request token has expired.
        expires_at = req_token['expires_at']
        if expires_at:
            now = timeutils.utcnow()
            expires = timeutils.normalize_time(
                timeutils.parse_isotime(expires_at))
            if now > expires:
                raise exception.Unauthorized(_('Request token is expired'))

        # put the roles in a set for easy comparison
        authed_roles = set()
        for role in roles:
            authed_roles.add(role['id'])

        # verify the authorizing user has the roles
        user_token = token_model.KeystoneToken(
            token_id=context['token_id'],
            token_data=self.token_provider_api.validate_token(
                context['token_id']))
        user_id = user_token.user_id
        project_id = req_token['requested_project_id']
        user_roles = self.assignment_api.get_roles_for_user_and_project(
            user_id, project_id)
        cred_set = set(user_roles)

        if not cred_set.issuperset(authed_roles):
            msg = _('authorizing user does not have role required')
            raise exception.Unauthorized(message=msg)

        # create list of just the id's for the backend
        role_list = list(authed_roles)

        # verify the user has the project too
        req_project_id = req_token['requested_project_id']
        user_projects = self.assignment_api.list_projects_for_user(user_id)
        for user_project in user_projects:
            if user_project['id'] == req_project_id:
                break
        else:
            msg = _("User is not a member of the requested project")
            raise exception.Unauthorized(message=msg)

        # finally authorize the token
        authed_token = self.oauth_api.authorize_request_token(
            request_token_id, user_id, role_list)

        # The verifier is handed back to the consumer out-of-band so it can
        # complete the access-token exchange.
        to_return = {'token': {'oauth_verifier': authed_token['verifier']}}
        return to_return
diff --git a/keystone-moon/keystone/contrib/oauth1/core.py b/keystone-moon/keystone/contrib/oauth1/core.py
new file mode 100644
index 00000000..eeb3e114
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/core.py
@@ -0,0 +1,361 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Extensions supporting OAuth1."""
+
+from __future__ import absolute_import
+
+import abc
+import string
+import uuid
+
+import oauthlib.common
+from oauthlib import oauth1
+from oslo_config import cfg
+from oslo_log import log
+import six
+
+from keystone.common import dependency
+from keystone.common import extension
+from keystone.common import manager
+from keystone import exception
+from keystone.i18n import _LE
+from keystone import notifications
+
+
# Re-export the oauthlib primitives used throughout this extension, so the
# rest of keystone depends only on this module rather than on oauthlib
# directly.
RequestValidator = oauth1.RequestValidator
Client = oauth1.Client
AccessTokenEndpoint = oauth1.AccessTokenEndpoint
ResourceEndpoint = oauth1.ResourceEndpoint
AuthorizationEndpoint = oauth1.AuthorizationEndpoint
SIG_HMAC = oauth1.SIGNATURE_HMAC
RequestTokenEndpoint = oauth1.RequestTokenEndpoint
oRequest = oauthlib.common.Request
# The characters used to generate verifiers are limited to alphanumerical
# values for ease of manual entry. Commonly confused characters are omitted.
VERIFIER_CHARS = string.ascii_letters + string.digits
CONFUSED_CHARS = 'jiIl1oO0'
VERIFIER_CHARS = ''.join(c for c in VERIFIER_CHARS if c not in CONFUSED_CHARS)
+
+
class Token(object):
    """Mutable holder for an OAuth token key/secret pair.

    The verifier starts out unset and is attached later via set_verifier().
    """

    def __init__(self, key, secret):
        self.key, self.secret, self.verifier = key, secret, None

    def set_verifier(self, verifier):
        """Record the verifier string for this token."""
        self.verifier = verifier
+
+
# Module-level config and logger handles (standard keystone pattern).
CONF = cfg.CONF
LOG = log.getLogger(__name__)
+
+
def token_generator(*args, **kwargs):
    """Return a random 32-character lowercase-hex token.

    Accepts and ignores any arguments so it can be plugged in directly as
    an oauthlib token-generator callback.
    """
    return '%032x' % uuid.uuid4().int
+
+
# Static metadata describing this extension; registered below so it appears
# in the deployment's admin and public extension listings.
EXTENSION_DATA = {
    'name': 'OpenStack OAUTH1 API',
    'namespace': 'http://docs.openstack.org/identity/api/ext/'
                 'OS-OAUTH1/v1.0',
    'alias': 'OS-OAUTH1',
    'updated': '2013-07-07T12:00:0-00:00',
    'description': 'OpenStack OAuth 1.0a Delegated Auth Mechanism.',
    'links': [
        {
            'rel': 'describedby',
            # TODO(dolph): link needs to be revised after
            # bug 928059 merges
            'type': 'text/html',
            'href': 'https://github.com/openstack/identity-api',
        }
    ]}
extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
+
+
def filter_consumer(consumer_ref):
    """Filter out private items in a consumer dict.

    'secret' is never returned; a falsy input (e.g. None) is returned
    unchanged.

    :returns: consumer_ref

    """
    if not consumer_ref:
        return consumer_ref
    filtered = dict(consumer_ref)
    filtered.pop('secret', None)
    return filtered
+
+
def filter_token(access_token_ref):
    """Filter out private items in an access token dict.

    'access_secret' is never returned; a falsy input (e.g. None) is
    returned unchanged.

    :returns: access_token_ref

    """
    if not access_token_ref:
        return access_token_ref
    filtered = dict(access_token_ref)
    filtered.pop('access_secret', None)
    return filtered
+
+
def get_oauth_headers(headers):
    """Extract the oauth_* parameters from a request's headers.

    In an OAuth-signed request the oauth variables arrive under the
    'Authorization' key, e.g.::

        OAuth realm="", oauth_body_hash="2jm%3D", oauth_nonce="14475435", ...

    The 'OAuth ' prefix is stripped and the remainder is parsed into a
    plain dict. Raises OAuthHeadersMissingError when no Authorization
    header is present.
    """
    if not headers or 'Authorization' not in headers:
        LOG.error(_LE('Cannot retrieve Authorization headers'))
        raise exception.OAuthHeadersMissingError()

    auth_header = headers['Authorization']
    parsed = oauth1.rfc5849.utils.parse_authorization_header(auth_header)
    return dict(parsed)
+
+
def extract_non_oauth_params(query_string):
    """Return the query parameters whose names are not oauth_* prefixed."""
    remainder = {}
    for key, value in oauthlib.common.extract_params(query_string):
        if not key.startswith('oauth_'):
            remainder[key] = value
    return remainder
+
+
@dependency.provider('oauth_api')
class Manager(manager.Manager):
    """Default pivot point for the OAuth1 backend.

    See :mod:`keystone.common.manager.Manager` for more details on how this
    dynamically calls the backend.

    """
    _ACCESS_TOKEN = "OS-OAUTH1:access_token"
    _REQUEST_TOKEN = "OS-OAUTH1:request_token"
    _CONSUMER = "OS-OAUTH1:consumer"

    def __init__(self):
        super(Manager, self).__init__(CONF.oauth1.driver)

    def create_consumer(self, consumer_ref, initiator=None):
        """Create a consumer and emit a 'created' audit event."""
        consumer = self.driver.create_consumer(consumer_ref)
        notifications.Audit.created(self._CONSUMER, consumer['id'], initiator)
        return consumer

    def update_consumer(self, consumer_id, consumer_ref, initiator=None):
        """Update a consumer and emit an 'updated' audit event."""
        consumer = self.driver.update_consumer(consumer_id, consumer_ref)
        notifications.Audit.updated(self._CONSUMER, consumer_id, initiator)
        return consumer

    def delete_consumer(self, consumer_id, initiator=None):
        """Delete a consumer and emit a 'deleted' audit event."""
        result = self.driver.delete_consumer(consumer_id)
        notifications.Audit.deleted(self._CONSUMER, consumer_id, initiator)
        return result

    def create_access_token(self, request_id, access_token_duration,
                            initiator=None):
        """Exchange a request token for an access token; audit it."""
        token = self.driver.create_access_token(request_id,
                                                access_token_duration)
        notifications.Audit.created(self._ACCESS_TOKEN, token['id'],
                                    initiator)
        return token

    def delete_access_token(self, user_id, access_token_id, initiator=None):
        """Delete an access token and emit a 'deleted' audit event."""
        result = self.driver.delete_access_token(user_id, access_token_id)
        notifications.Audit.deleted(self._ACCESS_TOKEN, access_token_id,
                                    initiator)
        return result

    def create_request_token(self, consumer_id, requested_project,
                             request_token_duration, initiator=None):
        """Create a request token and emit a 'created' audit event."""
        token = self.driver.create_request_token(
            consumer_id, requested_project, request_token_duration)
        notifications.Audit.created(self._REQUEST_TOKEN, token['id'],
                                    initiator)
        return token
+
+
@six.add_metaclass(abc.ABCMeta)
class Driver(object):
    """Interface description for an OAuth1 driver."""

    @abc.abstractmethod
    def create_consumer(self, consumer_ref):
        """Create consumer.

        :param consumer_ref: consumer ref with consumer name
        :type consumer_ref: dict
        :returns: consumer_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def update_consumer(self, consumer_id, consumer_ref):
        """Update consumer.

        :param consumer_id: id of consumer to update
        :type consumer_id: string
        :param consumer_ref: new consumer ref with consumer name
        :type consumer_ref: dict
        :returns: consumer_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def list_consumers(self):
        """List consumers.

        :returns: list of consumers

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_consumer(self, consumer_id):
        """Get consumer; the returned ref holds the consumer id (key)
        and description but never the secret.

        :param consumer_id: id of consumer to get
        :type consumer_id: string
        :returns: consumer_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_consumer_with_secret(self, consumer_id):
        """Like get_consumer(), but the returned consumer_ref includes
        the consumer secret.

        Secrets should only be shared upon consumer creation; the
        consumer secret is required to verify incoming OAuth requests.

        :param consumer_id: id of consumer to get
        :type consumer_id: string
        :returns: consumer_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def delete_consumer(self, consumer_id):
        """Delete consumer.

        :param consumer_id: id of consumer to delete
        :type consumer_id: string
        :returns: None.

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def list_access_tokens(self, user_id):
        """List access tokens.

        :param user_id: search for access tokens authorized by given user id
        :type user_id: string
        :returns: list of access tokens the user has authorized

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def delete_access_token(self, user_id, access_token_id):
        """Delete access token.

        :param user_id: authorizing user id
        :type user_id: string
        :param access_token_id: access token to delete
        :type access_token_id: string
        :returns: None

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def create_request_token(self, consumer_id, requested_project,
                             request_token_duration):
        """Create request token.

        :param consumer_id: the id of the consumer
        :type consumer_id: string
        :param requested_project: id of the project the consumer wants
                                  delegated access to
        :type requested_project: string
        :param request_token_duration: duration of request token
        :type request_token_duration: string
        :returns: request_token_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_request_token(self, request_token_id):
        """Get request token.

        :param request_token_id: the id of the request token
        :type request_token_id: string
        :returns: request_token_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def get_access_token(self, access_token_id):
        """Get access token.

        :param access_token_id: the id of the access token
        :type access_token_id: string
        :returns: access_token_ref

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def authorize_request_token(self, request_id, user_id, role_ids):
        """Authorize request token.

        :param request_id: the id of the request token, to be authorized
        :type request_id: string
        :param user_id: the id of the authorizing user
        :type user_id: string
        :param role_ids: list of role ids to authorize
        :type role_ids: list
        :returns: verifier

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def create_access_token(self, request_id, access_token_duration):
        """Create access token.

        :param request_id: the id of the request token being exchanged
                           (the request token is consumed on success)
        :type request_id: string
        :param access_token_duration: duration of an access token
        :type access_token_duration: string
        :returns: access_token_ref

        """
        raise exception.NotImplemented()  # pragma: no cover
diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/migrate_repo/__init__.py
diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/oauth1/migrate_repo/migrate.cfg
new file mode 100644
index 00000000..97ca7810
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/migrate_repo/migrate.cfg
@@ -0,0 +1,25 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=oauth1
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to
+# change the table name in each database too.
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the
+# commit continues, perhaps ending successfully.
+# Databases in this list MUST compile successfully during a commit, or the
+# entire commit will fail. List the databases your application will actually
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
+
+# When creating new change scripts, Migrate will stamp the new script with
+# a version number. By default this is latest_version + 1. You can set this
+# to 'true' to tell Migrate to use the UTC timestamp instead.
+use_timestamp_numbering=False
diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/001_add_oauth_tables.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/001_add_oauth_tables.py
new file mode 100644
index 00000000..a4fbf155
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/001_add_oauth_tables.py
@@ -0,0 +1,67 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
def upgrade(migrate_engine):
    """Create the three OAuth1 tables: consumer, request_token, access_token."""
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta = sql.MetaData()
    meta.bind = migrate_engine

    # Registered OAuth consumers; 'id' doubles as the consumer key.
    consumer_table = sql.Table(
        'consumer',
        meta,
        sql.Column('id', sql.String(64), primary_key=True, nullable=False),
        sql.Column('description', sql.String(64), nullable=False),
        sql.Column('secret', sql.String(64), nullable=False),
        sql.Column('extra', sql.Text(), nullable=False))
    consumer_table.create(migrate_engine, checkfirst=True)

    # Request tokens; 'verifier' and 'authorizing_user_id' are nullable
    # because they are only populated once a user authorizes the token.
    request_token_table = sql.Table(
        'request_token',
        meta,
        sql.Column('id', sql.String(64), primary_key=True, nullable=False),
        sql.Column('request_secret', sql.String(64), nullable=False),
        sql.Column('verifier', sql.String(64), nullable=True),
        sql.Column('authorizing_user_id', sql.String(64), nullable=True),
        sql.Column('requested_project_id', sql.String(64), nullable=False),
        sql.Column('requested_roles', sql.Text(), nullable=False),
        sql.Column('consumer_id', sql.String(64), nullable=False, index=True),
        sql.Column('expires_at', sql.String(64), nullable=True))
    request_token_table.create(migrate_engine, checkfirst=True)

    # Access tokens issued by exchanging an authorized request token.
    access_token_table = sql.Table(
        'access_token',
        meta,
        sql.Column('id', sql.String(64), primary_key=True, nullable=False),
        sql.Column('access_secret', sql.String(64), nullable=False),
        sql.Column('authorizing_user_id', sql.String(64),
                   nullable=False, index=True),
        sql.Column('project_id', sql.String(64), nullable=False),
        sql.Column('requested_roles', sql.Text(), nullable=False),
        sql.Column('consumer_id', sql.String(64), nullable=False),
        sql.Column('expires_at', sql.String(64), nullable=True))
    access_token_table.create(migrate_engine, checkfirst=True)
+
+
def downgrade(migrate_engine):
    """Drop the three OAuth1 tables created by upgrade()."""
    meta = sql.MetaData()
    meta.bind = migrate_engine
    # Operations to reverse the above upgrade go here.
    for table_name in ('consumer', 'request_token', 'access_token'):
        sql.Table(table_name, meta, autoload=True).drop()
diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/002_fix_oauth_tables_fk.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/002_fix_oauth_tables_fk.py
new file mode 100644
index 00000000..d39df8d5
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/002_fix_oauth_tables_fk.py
@@ -0,0 +1,54 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+from keystone.common.sql import migration_helpers
+
+
def upgrade(migrate_engine):
    """Add consumer_id foreign keys to the request/access token tables.

    SQLite cannot alter an existing table to add constraints, so the FKs
    are skipped on that backend (mirroring downgrade()).
    """
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    meta = sql.MetaData()
    meta.bind = migrate_engine

    consumer_table = sql.Table('consumer', meta, autoload=True)
    request_token_table = sql.Table('request_token', meta, autoload=True)
    access_token_table = sql.Table('access_token', meta, autoload=True)

    constraints = [{'table': request_token_table,
                    'fk_column': 'consumer_id',
                    'ref_column': consumer_table.c.id},
                   {'table': access_token_table,
                    'fk_column': 'consumer_id',
                    'ref_column': consumer_table.c.id}]
    # Bug fix: the original compared the Engine object itself to the string
    # 'sqlite' (always unequal), so the sqlite guard never triggered and the
    # constraint DDL ran on sqlite too. Compare the dialect name instead,
    # exactly as downgrade() already does.
    if migrate_engine.name != 'sqlite':
        migration_helpers.add_constraints(constraints)
+
+
def downgrade(migrate_engine):
    """Remove the consumer_id FKs added by upgrade() (skipped on sqlite)."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    consumer = sql.Table('consumer', meta, autoload=True)
    fk_specs = [{'table': sql.Table(name, meta, autoload=True),
                 'fk_column': 'consumer_id',
                 'ref_column': consumer.c.id}
                for name in ('request_token', 'access_token')]
    if migrate_engine.name != 'sqlite':
        migration_helpers.remove_constraints(fk_specs)
diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/003_consumer_description_nullalbe.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/003_consumer_description_nullalbe.py
new file mode 100644
index 00000000..e1cf8843
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/003_consumer_description_nullalbe.py
@@ -0,0 +1,29 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
def upgrade(migrate_engine):
    """Make the consumer description column nullable."""
    meta = sql.MetaData()
    meta.bind = migrate_engine
    consumer = sql.Table('consumer', meta, autoload=True)
    consumer.c.description.alter(nullable=True)
+
+
def downgrade(migrate_engine):
    """Revert the consumer description column to NOT NULL."""
    meta = sql.MetaData()
    meta.bind = migrate_engine
    consumer = sql.Table('consumer', meta, autoload=True)
    consumer.c.description.alter(nullable=False)
diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/004_request_token_roles_nullable.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/004_request_token_roles_nullable.py
new file mode 100644
index 00000000..6f1e2e81
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/004_request_token_roles_nullable.py
@@ -0,0 +1,35 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
def upgrade(migrate_engine):
    """Relax request-token roles and rename requested_roles to role_ids."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    request_tokens = sql.Table('request_token', meta, autoload=True)
    # A request token may now be created before any roles are requested.
    request_tokens.c.requested_roles.alter(nullable=True)
    request_tokens.c.requested_roles.alter(name="role_ids")

    access_tokens = sql.Table('access_token', meta, autoload=True)
    access_tokens.c.requested_roles.alter(name="role_ids")
+
+
def downgrade(migrate_engine):
    """Rename role_ids back to requested_roles and restore NOT NULL."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    request_tokens = sql.Table('request_token', meta, autoload=True)
    request_tokens.c.role_ids.alter(nullable=False)
    request_tokens.c.role_ids.alter(name="requested_roles")

    access_tokens = sql.Table('access_token', meta, autoload=True)
    access_tokens.c.role_ids.alter(name="requested_roles")
diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/005_consumer_id_index.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/005_consumer_id_index.py
new file mode 100644
index 00000000..428971f8
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/005_consumer_id_index.py
@@ -0,0 +1,42 @@
+# Copyright 2014 Mirantis.inc
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sa
+
+
def upgrade(migrate_engine):
    """Normalize the name of the MySQL index on access_token.consumer_id."""

    if migrate_engine.name == 'mysql':
        meta = sa.MetaData(bind=migrate_engine)
        table = sa.Table('access_token', meta, autoload=True)

        # NOTE(i159): MySQL requires indexes on referencing columns and
        # creates those indexes automatically.  The auto-generated index
        # name varies with the MySQL version in use, so we make the naming
        # consistent by reverting the index name to a fixed value.
        if any(i for i in table.indexes if i.columns.keys() == ['consumer_id']
               and i.name != 'consumer_id'):
            # NOTE(i159): creating the index under the new name effectively
            # re-creates (renames) it, which is how renaming works under
            # MySQL rules.
            sa.Index('consumer_id', table.c.consumer_id).create()
+
+
def downgrade(migrate_engine):
    """Intentional no-op.

    NOTE(i159): the index exists only in MySQL schemas and only got an
    inconsistent name when MySQL 5.5 renamed it after re-creation during
    migrations.  The upgrade merely fixed that inconsistency, so there is
    nothing to revert here.
    """
    pass
diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/__init__.py
diff --git a/keystone-moon/keystone/contrib/oauth1/routers.py b/keystone-moon/keystone/contrib/oauth1/routers.py
new file mode 100644
index 00000000..35619ede
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/routers.py
@@ -0,0 +1,154 @@
+# Copyright 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+
+from keystone.common import json_home
+from keystone.common import wsgi
+from keystone.contrib.oauth1 import controllers
+
+
# Factories for JSON-Home relation links, pre-bound to the OS-OAUTH1
# extension name/version so each route below only supplies its own
# resource or parameter name.
build_resource_relation = functools.partial(
    json_home.build_v3_extension_resource_relation,
    extension_name='OS-OAUTH1', extension_version='1.0')

build_parameter_relation = functools.partial(
    json_home.build_v3_extension_parameter_relation,
    extension_name='OS-OAUTH1', extension_version='1.0')

# Relation for the {access_token_id} path parameter, shared by the several
# access-token routes registered below.
ACCESS_TOKEN_ID_PARAMETER_RELATION = build_parameter_relation(
    parameter_name='access_token_id')
+
+
class OAuth1Extension(wsgi.V3ExtensionRouter):
    """API Endpoints for the OAuth1 extension.

    The goal of this extension is to allow third-party service providers
    to acquire tokens with a limited subset of a user's roles for acting
    on behalf of that user. This is done using an oauth-similar flow and
    api.

    The API looks like::

        # Basic admin-only consumer crud
        POST /OS-OAUTH1/consumers
        GET /OS-OAUTH1/consumers
        PATCH /OS-OAUTH1/consumers/$consumer_id
        GET /OS-OAUTH1/consumers/$consumer_id
        DELETE /OS-OAUTH1/consumers/$consumer_id

        # User access token crud
        GET /users/$user_id/OS-OAUTH1/access_tokens
        GET /users/$user_id/OS-OAUTH1/access_tokens/$access_token_id
        GET /users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}/roles
        GET /users/{user_id}/OS-OAUTH1/access_tokens
            /{access_token_id}/roles/{role_id}
        DELETE /users/$user_id/OS-OAUTH1/access_tokens/$access_token_id

        # OAuth interfaces
        POST /OS-OAUTH1/request_token  # create a request token
        PUT /OS-OAUTH1/authorize       # authorize a request token
        POST /OS-OAUTH1/access_token   # create an access token

    """

    def add_routes(self, mapper):
        """Register every OS-OAUTH1 route with JSON-Home relation metadata."""
        consumer_controller = controllers.ConsumerCrudV3()
        access_token_controller = controllers.AccessTokenCrudV3()
        access_token_roles_controller = controllers.AccessTokenRolesV3()
        oauth_controller = controllers.OAuthControllerV3()

        # basic admin-only consumer crud
        self._add_resource(
            mapper, consumer_controller,
            path='/OS-OAUTH1/consumers',
            get_action='list_consumers',
            post_action='create_consumer',
            rel=build_resource_relation(resource_name='consumers'))
        self._add_resource(
            mapper, consumer_controller,
            path='/OS-OAUTH1/consumers/{consumer_id}',
            get_action='get_consumer',
            patch_action='update_consumer',
            delete_action='delete_consumer',
            rel=build_resource_relation(resource_name='consumer'),
            path_vars={
                'consumer_id':
                build_parameter_relation(parameter_name='consumer_id'),
            })

        # user access token crud
        self._add_resource(
            mapper, access_token_controller,
            path='/users/{user_id}/OS-OAUTH1/access_tokens',
            get_action='list_access_tokens',
            rel=build_resource_relation(resource_name='user_access_tokens'),
            path_vars={
                'user_id': json_home.Parameters.USER_ID,
            })
        self._add_resource(
            mapper, access_token_controller,
            path='/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}',
            get_action='get_access_token',
            delete_action='delete_access_token',
            rel=build_resource_relation(resource_name='user_access_token'),
            path_vars={
                'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION,
                'user_id': json_home.Parameters.USER_ID,
            })
        self._add_resource(
            mapper, access_token_roles_controller,
            path='/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}/'
            'roles',
            get_action='list_access_token_roles',
            rel=build_resource_relation(
                resource_name='user_access_token_roles'),
            path_vars={
                'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION,
                'user_id': json_home.Parameters.USER_ID,
            })
        self._add_resource(
            mapper, access_token_roles_controller,
            path='/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}/'
            'roles/{role_id}',
            get_action='get_access_token_role',
            rel=build_resource_relation(
                resource_name='user_access_token_role'),
            path_vars={
                'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION,
                'role_id': json_home.Parameters.ROLE_ID,
                'user_id': json_home.Parameters.USER_ID,
            })

        # oauth flow calls
        self._add_resource(
            mapper, oauth_controller,
            path='/OS-OAUTH1/request_token',
            post_action='create_request_token',
            rel=build_resource_relation(resource_name='request_tokens'))
        self._add_resource(
            mapper, oauth_controller,
            path='/OS-OAUTH1/access_token',
            post_action='create_access_token',
            rel=build_resource_relation(resource_name='access_tokens'))
        self._add_resource(
            mapper, oauth_controller,
            path='/OS-OAUTH1/authorize/{request_token_id}',
            path_vars={
                'request_token_id':
                build_parameter_relation(parameter_name='request_token_id')
            },
            put_action='authorize_request_token',
            rel=build_resource_relation(
                resource_name='authorize_request_token'))
diff --git a/keystone-moon/keystone/contrib/oauth1/validator.py b/keystone-moon/keystone/contrib/oauth1/validator.py
new file mode 100644
index 00000000..8f44059e
--- /dev/null
+++ b/keystone-moon/keystone/contrib/oauth1/validator.py
@@ -0,0 +1,179 @@
+# Copyright 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""oAuthlib request validator."""
+
+from oslo_log import log
+import six
+
+from keystone.common import dependency
+from keystone.contrib.oauth1 import core as oauth1
+from keystone import exception
+
+
# Name under which this validator is exposed.
METHOD_NAME = 'oauth_validator'
LOG = log.getLogger(__name__)
+
+
@dependency.requires('oauth_api')
class OAuthValidator(oauth1.RequestValidator):
    """oauthlib request validator backed by Keystone's ``oauth_api``.

    Shape checks (key/token/nonce/verifier format) are performed locally;
    existence and secret lookups are delegated to the injected ``oauth_api``
    backend.  Realms and SSL enforcement are intentionally unused.
    """

    # TODO(mhu) set as option probably?
    @property
    def enforce_ssl(self):
        return False

    @property
    def safe_characters(self):
        # oauth tokens are generated from a uuid hex value
        return set("abcdef0123456789")

    def _check_token(self, token):
        # generic token verification when they're obtained from a uuid hex
        return (set(token) <= self.safe_characters and
                len(token) == 32)

    def check_client_key(self, client_key):
        return self._check_token(client_key)

    def check_request_token(self, request_token):
        return self._check_token(request_token)

    def check_access_token(self, access_token):
        return self._check_token(access_token)

    def check_nonce(self, nonce):
        # Assuming length is not a concern
        return set(nonce) <= self.safe_characters

    def check_verifier(self, verifier):
        # Verifiers are exactly 8 characters drawn from VERIFIER_CHARS.
        return (all(i in oauth1.VERIFIER_CHARS for i in verifier) and
                len(verifier) == 8)

    def get_client_secret(self, client_key, request):
        client = self.oauth_api.get_consumer_with_secret(client_key)
        return client['secret']

    def get_request_token_secret(self, client_key, token, request):
        token_ref = self.oauth_api.get_request_token(token)
        return token_ref['request_secret']

    def get_access_token_secret(self, client_key, token, request):
        access_token = self.oauth_api.get_access_token(token)
        return access_token['access_secret']

    def get_default_realms(self, client_key, request):
        # realms weren't implemented with the previous library
        return []

    def get_realms(self, token, request):
        return []

    def get_redirect_uri(self, token, request):
        # OOB (out of band) is supposed to be the default value to use
        return 'oob'

    def get_rsa_key(self, client_key, request):
        # HMAC signing is used, so return a dummy value
        return ''

    def invalidate_request_token(self, client_key, request_token, request):
        # this method is invoked when an access token is generated out of a
        # request token, to make sure that request token cannot be consumed
        # anymore. This is done in the backend, so we do nothing here.
        pass

    def validate_client_key(self, client_key, request):
        # EAFP: the backend raises NotFound for unknown consumers.
        try:
            return self.oauth_api.get_consumer(client_key) is not None
        except exception.NotFound:
            return False

    def validate_request_token(self, client_key, token, request):
        try:
            return self.oauth_api.get_request_token(token) is not None
        except exception.NotFound:
            return False

    def validate_access_token(self, client_key, token, request):
        try:
            return self.oauth_api.get_access_token(token) is not None
        except exception.NotFound:
            return False

    def validate_timestamp_and_nonce(self,
                                     client_key,
                                     timestamp,
                                     nonce,
                                     request,
                                     request_token=None,
                                     access_token=None):
        # Replay protection is not implemented; every timestamp/nonce passes.
        return True

    def validate_redirect_uri(self, client_key, redirect_uri, request):
        # we expect OOB, we don't really care
        return True

    def validate_requested_realms(self, client_key, realms, request):
        # realms are not used
        return True

    def validate_realms(self,
                        client_key,
                        token,
                        request,
                        uri=None,
                        realms=None):
        return True

    def validate_verifier(self, client_key, token, verifier, request):
        # Compare against the verifier stored with the request token.
        try:
            req_token = self.oauth_api.get_request_token(token)
            return req_token['verifier'] == verifier
        except exception.NotFound:
            return False

    def verify_request_token(self, token, request):
        # there aren't strong expectations on the request token format
        return isinstance(token, six.string_types)

    def verify_realms(self, token, realms, request):
        return True

    # The following save_XXX methods are called to create tokens. I chose to
    # keep the original logic, but the comments below show how that could be
    # implemented. The real implementation logic is in the backend.
    def save_access_token(self, token, request):
        pass
#        token_duration = CONF.oauth1.request_token_duration
#        request_token_id = request.client_key
#        self.oauth_api.create_access_token(request_token_id,
#                                           token_duration,
#                                           token["oauth_token"],
#                                           token["oauth_token_secret"])

    def save_request_token(self, token, request):
        pass
#        project_id = request.headers.get('Requested-Project-Id')
#        token_duration = CONF.oauth1.request_token_duration
#        self.oauth_api.create_request_token(request.client_key,
#                                            project_id,
#                                            token_duration,
#                                            token["oauth_token"],
#                                            token["oauth_token_secret"])

    def save_verifier(self, token, verifier, request):
        # keep the old logic for this, as it is done in two steps and requires
        # information that the request validator has no access to
        pass
diff --git a/keystone-moon/keystone/contrib/revoke/__init__.py b/keystone-moon/keystone/contrib/revoke/__init__.py
new file mode 100644
index 00000000..58ba68db
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/__init__.py
@@ -0,0 +1,13 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.revoke.core import * # noqa
diff --git a/keystone-moon/keystone/contrib/revoke/backends/__init__.py b/keystone-moon/keystone/contrib/revoke/backends/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/backends/__init__.py
diff --git a/keystone-moon/keystone/contrib/revoke/backends/kvs.py b/keystone-moon/keystone/contrib/revoke/backends/kvs.py
new file mode 100644
index 00000000..cc41fbee
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/backends/kvs.py
@@ -0,0 +1,73 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_config import cfg
+from oslo_utils import timeutils
+
+from keystone.common import kvs
+from keystone.contrib import revoke
+from keystone import exception
+from keystone.openstack.common import versionutils
+
+
CONF = cfg.CONF

# Single KVS key under which the entire list of revocation events is stored.
_EVENT_KEY = 'os-revoke-events'
_KVS_BACKEND = 'openstack.kvs.Memory'
+
+
class Revoke(revoke.Driver):
    """In-memory KVS revocation-event store (deprecated in favor of SQL)."""

    @versionutils.deprecated(
        versionutils.deprecated.JUNO,
        in_favor_of='keystone.contrib.revoke.backends.sql',
        remove_in=+1,
        what='keystone.contrib.revoke.backends.kvs')
    def __init__(self, **kwargs):
        super(Revoke, self).__init__()
        self._store = kvs.get_key_value_store('os-revoke-driver')
        self._store.configure(backing_store=_KVS_BACKEND, **kwargs)

    def _list_events(self):
        # An empty store simply means no events have been recorded yet.
        try:
            return self._store.get(_EVENT_KEY)
        except exception.NotFound:
            return []

    def _prune_expired_events_and_get(self, last_fetch=None, new_event=None):
        """Prune expired events, optionally append one, and return results.

        :param last_fetch: if given, only events revoked after this time are
            included in the returned list.
        :param new_event: optional event to append before pruning.
        :returns: the list of still-live events newer than ``last_fetch``.
        """
        pruned = []
        results = []
        expire_delta = datetime.timedelta(seconds=CONF.token.expiration)
        oldest = timeutils.utcnow() - expire_delta
        # TODO(ayoung): Store the time of the oldest event so that the
        # prune process can be skipped if none of the events have timed out.
        # The read-modify-write of the event list happens under the store's
        # lock so concurrent revocations do not lose events.
        with self._store.get_lock(_EVENT_KEY) as lock:
            events = self._list_events()
            if new_event is not None:
                events.append(new_event)

            for event in events:
                revoked_at = event.revoked_at
                if revoked_at > oldest:
                    # Still live: keep it in the store ...
                    pruned.append(event)
                    # ... and report it if it is new to this caller.
                    if last_fetch is None or revoked_at > last_fetch:
                        results.append(event)
            self._store.set(_EVENT_KEY, pruned, lock)
        return results

    def list_events(self, last_fetch=None):
        return self._prune_expired_events_and_get(last_fetch=last_fetch)

    def revoke(self, event):
        self._prune_expired_events_and_get(new_event=event)
diff --git a/keystone-moon/keystone/contrib/revoke/backends/sql.py b/keystone-moon/keystone/contrib/revoke/backends/sql.py
new file mode 100644
index 00000000..1b0cde1e
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/backends/sql.py
@@ -0,0 +1,104 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.common import sql
+from keystone.contrib import revoke
+from keystone.contrib.revoke import model
+
+
class RevocationEvent(sql.ModelBase, sql.ModelDictMixin):
    """SQL model for one revocation event; columns mirror REVOKE_KEYS."""

    __tablename__ = 'revocation_event'
    attributes = model.REVOKE_KEYS

    # The id field is not going to be exposed to the outside world.
    # It is, however, necessary for SQLAlchemy.
    id = sql.Column(sql.String(64), primary_key=True)
    # Scoping attributes a token is matched against; any may be NULL.
    domain_id = sql.Column(sql.String(64))
    project_id = sql.Column(sql.String(64))
    user_id = sql.Column(sql.String(64))
    role_id = sql.Column(sql.String(64))
    trust_id = sql.Column(sql.String(64))
    consumer_id = sql.Column(sql.String(64))
    access_token_id = sql.Column(sql.String(64))
    issued_before = sql.Column(sql.DateTime(), nullable=False)
    expires_at = sql.Column(sql.DateTime())
    # revoked_at drives pruning and the list_events ordering.
    revoked_at = sql.Column(sql.DateTime(), nullable=False)
    audit_id = sql.Column(sql.String(32))
    audit_chain_id = sql.Column(sql.String(32))
+
+
class Revoke(revoke.Driver):
    """SQL-backed revocation-event store."""

    def _flush_batch_size(self, dialect):
        """Return the delete batch size for *dialect* (0 = no batching)."""
        batch_size = 0
        if dialect == 'ibm_db_sa':
            # This functionality is limited to DB2, because
            # it is necessary to prevent the transaction log
            # from filling up, whereas at least some of the
            # other supported databases do not support update
            # queries with LIMIT subqueries nor do they appear
            # to require the use of such queries when deleting
            # large numbers of records at once.
            batch_size = 100
            # Limit of 100 is known to not fill a transaction log
            # of default maximum size while not significantly
            # impacting the performance of large token purges on
            # systems where the maximum transaction log size has
            # been increased beyond the default.
        return batch_size

    def _prune_expired_events(self):
        """Delete events older than the token-expiration cutoff."""
        oldest = revoke.revoked_before_cutoff_time()

        session = sql.get_session()
        dialect = session.bind.dialect.name
        batch_size = self._flush_batch_size(dialect)
        if batch_size > 0:
            # DB2 path: delete in LIMITed batches until nothing matches,
            # to keep each transaction small.
            query = session.query(RevocationEvent.id)
            query = query.filter(RevocationEvent.revoked_at < oldest)
            query = query.limit(batch_size).subquery()
            delete_query = (session.query(RevocationEvent).
                            filter(RevocationEvent.id.in_(query)))
            while True:
                rowcount = delete_query.delete(synchronize_session=False)
                if rowcount == 0:
                    break
        else:
            # Other backends handle one bulk DELETE fine.
            query = session.query(RevocationEvent)
            query = query.filter(RevocationEvent.revoked_at < oldest)
            query.delete(synchronize_session=False)

        session.flush()

    def list_events(self, last_fetch=None):
        """Return events newer than *last_fetch*, oldest first."""
        self._prune_expired_events()
        session = sql.get_session()
        query = session.query(RevocationEvent).order_by(
            RevocationEvent.revoked_at)

        if last_fetch:
            query = query.filter(RevocationEvent.revoked_at > last_fetch)

        events = [model.RevokeEvent(**e.to_dict()) for e in query]

        return events

    def revoke(self, event):
        """Persist *event* as a new row with a generated surrogate id."""
        kwargs = dict()
        for attr in model.REVOKE_KEYS:
            kwargs[attr] = getattr(event, attr)
        kwargs['id'] = uuid.uuid4().hex
        record = RevocationEvent(**kwargs)
        session = sql.get_session()
        with session.begin():
            session.add(record)
diff --git a/keystone-moon/keystone/contrib/revoke/controllers.py b/keystone-moon/keystone/contrib/revoke/controllers.py
new file mode 100644
index 00000000..40151bae
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/controllers.py
@@ -0,0 +1,44 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_utils import timeutils
+
+from keystone.common import controller
+from keystone.common import dependency
+from keystone import exception
+from keystone.i18n import _
+
+
@dependency.requires('revoke_api')
class RevokeController(controller.V3Controller):
    """Read-only controller exposing the list of revocation events."""

    @controller.protected()
    def list_revoke_events(self, context):
        """List revocation events, optionally filtered by ``?since=``.

        :param context: request context; ``query_string`` may carry an ISO
            8601 ``since`` timestamp.
        :raises exception.ValidationError: if ``since`` cannot be parsed.
        """
        since = context['query_string'].get('since')
        last_fetch = None
        if since:
            try:
                last_fetch = timeutils.normalize_time(
                    timeutils.parse_isotime(since))
            except ValueError:
                raise exception.ValidationError(
                    message=_('invalid date format %s') % since)
        events = self.revoke_api.list_events(last_fetch=last_fetch)
        # Build the links by hand as the standard controller calls require ids
        response = {'events': [event.to_dict() for event in events],
                    'links': {
                        'next': None,
                        'self': RevokeController.base_url(
                            context,
                            path=context['path']),
                        'previous': None}
                    }
        return response
diff --git a/keystone-moon/keystone/contrib/revoke/core.py b/keystone-moon/keystone/contrib/revoke/core.py
new file mode 100644
index 00000000..c7335690
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/core.py
@@ -0,0 +1,250 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import abc
+import datetime
+
+from oslo_config import cfg
+from oslo_log import log
+from oslo_utils import timeutils
+import six
+
+from keystone.common import cache
+from keystone.common import dependency
+from keystone.common import extension
+from keystone.common import manager
+from keystone.contrib.revoke import model
+from keystone import exception
+from keystone.i18n import _
+from keystone import notifications
+from keystone.openstack.common import versionutils
+
+
+CONF = cfg.CONF
+LOG = log.getLogger(__name__)
+
+
# Extension metadata advertised on both the admin and public pipelines.
EXTENSION_DATA = {
    'name': 'OpenStack Revoke API',
    'namespace': 'http://docs.openstack.org/identity/api/ext/'
                 'OS-REVOKE/v1.0',
    'alias': 'OS-REVOKE',
    'updated': '2014-02-24T20:51:0-00:00',
    'description': 'OpenStack revoked token reporting mechanism.',
    'links': [
        {
            'rel': 'describedby',
            'type': 'text/html',
            'href': ('https://github.com/openstack/identity-api/blob/master/'
                     'openstack-identity-api/v3/src/markdown/'
                     'identity-api-v3-os-revoke-ext.md'),
        }
    ]}
extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)

# Memoization decorator configured from the [revoke] cache section; used to
# cache the revoke tree built by Manager._get_revoke_tree.
MEMOIZE = cache.get_memoization_decorator(section='revoke')
+
+
def revoked_before_cutoff_time():
    """Return the revocation-time cutoff below which events may be pruned.

    Events revoked earlier than this can no longer match any valid token:
    the window is the token lifetime plus the configured safety buffer.
    """
    buffer_seconds = CONF.token.expiration + CONF.revoke.expiration_buffer
    return timeutils.utcnow() - datetime.timedelta(seconds=buffer_seconds)
+
+
@dependency.provider('revoke_api')
class Manager(manager.Manager):
    """Revoke API Manager.

    Performs common logic for recording revocations.  Subscribes to
    identity-related notifications so that deleting/disabling a resource
    implicitly revokes the affected tokens.
    """

    def __init__(self):
        super(Manager, self).__init__(CONF.revoke.driver)
        self._register_listeners()
        self.model = model

    def _user_callback(self, service, resource_type, operation,
                       payload):
        # Deleted/disabled user: revoke everything issued to that user.
        self.revoke_by_user(payload['resource_info'])

    def _role_callback(self, service, resource_type, operation,
                       payload):
        self.revoke(
            model.RevokeEvent(role_id=payload['resource_info']))

    def _project_callback(self, service, resource_type, operation,
                          payload):
        self.revoke(
            model.RevokeEvent(project_id=payload['resource_info']))

    def _domain_callback(self, service, resource_type, operation,
                         payload):
        self.revoke(
            model.RevokeEvent(domain_id=payload['resource_info']))

    def _trust_callback(self, service, resource_type, operation,
                        payload):
        self.revoke(
            model.RevokeEvent(trust_id=payload['resource_info']))

    def _consumer_callback(self, service, resource_type, operation,
                           payload):
        self.revoke(
            model.RevokeEvent(consumer_id=payload['resource_info']))

    def _access_token_callback(self, service, resource_type, operation,
                               payload):
        self.revoke(
            model.RevokeEvent(access_token_id=payload['resource_info']))

    def _group_callback(self, service, resource_type, operation, payload):
        # Group-level events fan out to one revocation per group member.
        # NOTE(review): relies on self.identity_api being injected by the
        # dependency framework elsewhere -- confirm before reuse.
        user_ids = (u['id'] for u in self.identity_api.list_users_in_group(
            payload['resource_info']))
        for uid in user_ids:
            self.revoke(model.RevokeEvent(user_id=uid))

    def _register_listeners(self):
        # Map notification action -> [(resource type, callback), ...].
        callbacks = {
            notifications.ACTIONS.deleted: [
                ['OS-TRUST:trust', self._trust_callback],
                ['OS-OAUTH1:consumer', self._consumer_callback],
                ['OS-OAUTH1:access_token', self._access_token_callback],
                ['role', self._role_callback],
                ['user', self._user_callback],
                ['project', self._project_callback],
            ],
            notifications.ACTIONS.disabled: [
                ['user', self._user_callback],
                ['project', self._project_callback],
                ['domain', self._domain_callback],
            ],
            notifications.ACTIONS.internal: [
                [notifications.INVALIDATE_USER_TOKEN_PERSISTENCE,
                 self._user_callback],
            ]
        }

        for event, cb_info in six.iteritems(callbacks):
            for resource_type, callback_fns in cb_info:
                notifications.register_event_callback(event, resource_type,
                                                      callback_fns)

    def revoke_by_user(self, user_id):
        """Revoke all tokens issued to *user_id*."""
        return self.revoke(model.RevokeEvent(user_id=user_id))

    def _assert_not_domain_and_project_scoped(self, domain_id=None,
                                              project_id=None):
        # A token can never be scoped to both; receiving both indicates a
        # server-side bug, not bad user input.
        if domain_id is not None and project_id is not None:
            msg = _('The revoke call must not have both domain_id and '
                    'project_id. This is a bug in the Keystone server. The '
                    'current request is aborted.')
            raise exception.UnexpectedError(exception=msg)

    @versionutils.deprecated(as_of=versionutils.deprecated.JUNO,
                             remove_in=0)
    def revoke_by_expiration(self, user_id, expires_at,
                             domain_id=None, project_id=None):
        """Revoke a single token identified by user and expiry (deprecated)."""

        self._assert_not_domain_and_project_scoped(domain_id=domain_id,
                                                   project_id=project_id)

        self.revoke(
            model.RevokeEvent(user_id=user_id,
                              expires_at=expires_at,
                              domain_id=domain_id,
                              project_id=project_id))

    def revoke_by_audit_id(self, audit_id):
        """Revoke the single token carrying *audit_id*."""
        self.revoke(model.RevokeEvent(audit_id=audit_id))

    def revoke_by_audit_chain_id(self, audit_chain_id, project_id=None,
                                 domain_id=None):
        """Revoke a whole token chain, optionally narrowed by scope."""

        self._assert_not_domain_and_project_scoped(domain_id=domain_id,
                                                   project_id=project_id)

        self.revoke(model.RevokeEvent(audit_chain_id=audit_chain_id,
                                      domain_id=domain_id,
                                      project_id=project_id))

    def revoke_by_grant(self, role_id, user_id=None,
                        domain_id=None, project_id=None):
        """Revoke tokens that depend on a specific role grant."""
        self.revoke(
            model.RevokeEvent(user_id=user_id,
                              role_id=role_id,
                              domain_id=domain_id,
                              project_id=project_id))

    def revoke_by_user_and_project(self, user_id, project_id):
        self.revoke(
            model.RevokeEvent(project_id=project_id, user_id=user_id))

    def revoke_by_project_role_assignment(self, project_id, role_id):
        self.revoke(model.RevokeEvent(project_id=project_id, role_id=role_id))

    def revoke_by_domain_role_assignment(self, domain_id, role_id):
        self.revoke(model.RevokeEvent(domain_id=domain_id, role_id=role_id))

    @MEMOIZE
    def _get_revoke_tree(self):
        # Cached (see MEMOIZE) so token validation does not hit the driver
        # on every check; revoke() invalidates the cache.
        events = self.driver.list_events()
        revoke_tree = model.RevokeTree(revoke_events=events)

        return revoke_tree

    def check_token(self, token_values):
        """Checks the values from a token against the revocation list

        :param token_values: dictionary of values from a token,
            normalized for differences between v2 and v3. The checked values
            are a subset of the attributes of model.TokenEvent

        :raises exception.TokenNotFound: if the token is invalid

        """
        if self._get_revoke_tree().is_revoked(token_values):
            raise exception.TokenNotFound(_('Failed to validate token'))

    def revoke(self, event):
        """Persist *event* and invalidate the memoized revoke tree."""
        self.driver.revoke(event)
        self._get_revoke_tree.invalidate(self)
+
+
@six.add_metaclass(abc.ABCMeta)
class Driver(object):
    """Interface for recording and reporting revocation events."""

    @abc.abstractmethod
    def list_events(self, last_fetch=None):
        """Return the revocation events, as a list of objects.

        :param last_fetch: Time of last fetch. Return all events newer.
        :returns: A list of keystone.contrib.revoke.model.RevokeEvent
            newer than `last_fetch.`
            If no last_fetch is specified, returns all events
            for tokens issued after the expiration cutoff.

        """
        raise exception.NotImplemented()  # pragma: no cover

    @abc.abstractmethod
    def revoke(self, event):
        """Register a revocation event.

        :param event: An instance of
            keystone.contrib.revoke.model.RevocationEvent

        """
        raise exception.NotImplemented()  # pragma: no cover
diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/revoke/migrate_repo/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/migrate_repo/__init__.py
diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/revoke/migrate_repo/migrate.cfg
new file mode 100644
index 00000000..0e61bcaa
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/migrate_repo/migrate.cfg
@@ -0,0 +1,25 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=revoke
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to
+# change the table name in each database too.
+version_table=migrate_version
+
+# When committing a change script, Migrate will attempt to generate the
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the
+# commit continues, perhaps ending successfully.
+# Databases in this list MUST compile successfully during a commit, or the
+# entire commit will fail. List the databases your application will actually
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
+
+# When creating new change scripts, Migrate will stamp the new script with
+# a version number. By default this is latest_version + 1. You can set this
+# to 'true' to tell Migrate to use the UTC timestamp instead.
+use_timestamp_numbering=False
diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/001_revoke_table.py b/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/001_revoke_table.py
new file mode 100644
index 00000000..7927ce0c
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/001_revoke_table.py
@@ -0,0 +1,47 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
def upgrade(migrate_engine):
    """Create the ``revocation_event`` table.

    Binds the shared metadata to the engine supplied by sqlalchemy-migrate;
    migrations must never construct their own engine.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    revocation_event = sql.Table(
        'revocation_event',
        meta,
        sql.Column('id', sql.String(64), primary_key=True),
        sql.Column('domain_id', sql.String(64)),
        sql.Column('project_id', sql.String(64)),
        sql.Column('user_id', sql.String(64)),
        sql.Column('role_id', sql.String(64)),
        sql.Column('trust_id', sql.String(64)),
        sql.Column('consumer_id', sql.String(64)),
        sql.Column('access_token_id', sql.String(64)),
        sql.Column('issued_before', sql.DateTime(), nullable=False),
        sql.Column('expires_at', sql.DateTime()),
        # revoked_at is indexed: list_events filters on it for every fetch.
        sql.Column('revoked_at', sql.DateTime(), index=True, nullable=False))
    revocation_event.create(migrate_engine, checkfirst=True)
+
+
def downgrade(migrate_engine):
    """Drop the ``revocation_event`` table created by ``upgrade``."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    # autoload=True reflects the current definition from the live database.
    revocation_event = sql.Table('revocation_event', meta, autoload=True)
    revocation_event.drop(migrate_engine, checkfirst=True)
diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/002_add_audit_id_and_chain_to_revoke_table.py b/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/002_add_audit_id_and_chain_to_revoke_table.py
new file mode 100644
index 00000000..bee6fb2a
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/002_add_audit_id_and_chain_to_revoke_table.py
@@ -0,0 +1,37 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sqlalchemy as sql
+
+
+_TABLE_NAME = 'revocation_event'
+
+
def upgrade(migrate_engine):
    """Add nullable audit_id / audit_chain_id columns to revocation_event."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    event_table = sql.Table(_TABLE_NAME, meta, autoload=True)
    for column_name in ('audit_id', 'audit_chain_id'):
        # Nullable: pre-existing rows (and events without audit data)
        # simply have no audit identifiers.
        event_table.create_column(
            sql.Column(column_name, sql.String(32), nullable=True))
+
+
def downgrade(migrate_engine):
    """Remove the audit columns added by ``upgrade``."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    event_table = sql.Table(_TABLE_NAME, meta, autoload=True)
    for column_name in ('audit_id', 'audit_chain_id'):
        event_table.drop_column(column_name)
diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/__init__.py
diff --git a/keystone-moon/keystone/contrib/revoke/model.py b/keystone-moon/keystone/contrib/revoke/model.py
new file mode 100644
index 00000000..5e92042d
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/model.py
@@ -0,0 +1,365 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_utils import timeutils
+
+
# The set of attributes common between the RevokeEvent
# and the dictionaries created from the token Data.
_NAMES = ['trust_id',
          'consumer_id',
          'access_token_id',
          'audit_id',
          'audit_chain_id',
          'expires_at',
          'domain_id',
          'project_id',
          'user_id',
          'role_id']


# Additional arguments for creating a RevokeEvent
_EVENT_ARGS = ['issued_before', 'revoked_at']

# Names of attributes in the RevocationEvent, including "virtual" attributes.
# Virtual attributes are those added based on other values.
_EVENT_NAMES = _NAMES + ['domain_scope_id']

# Values that will be in the token data but not in the event.
# These will be compared with event values that have different names.
# For example: both trustor_id and trustee_id are compared against user_id
_TOKEN_KEYS = ['identity_domain_id',
               'assignment_domain_id',
               'issued_at',
               'trustor_id',
               'trustee_id']


REVOKE_KEYS = _NAMES + _EVENT_ARGS
+
+
def blank_token_data(issued_at):
    """Return a token-values dict with every known field set to None.

    Only 'issued_at' is populated, since every token is required to
    carry one.
    """
    token_data = dict.fromkeys(_NAMES)
    token_data.update(dict.fromkeys(_TOKEN_KEYS))
    # required field
    token_data['issued_at'] = issued_at
    return token_data
+
+
class RevokeEvent(object):
    """A single revocation event, normalized for storage and tree matching.

    Unset attributes are stored as None and act as wildcards when the
    event is matched against token values.
    """

    def __init__(self, **kwargs):
        # Copy only the recognized revocation attributes; anything not
        # supplied defaults to None (wildcard).
        for k in REVOKE_KEYS:
            v = kwargs.get(k, None)
            setattr(self, k, v)

        if self.domain_id and self.expires_at:
            # This is revoking a domain-scoped token.
            self.domain_scope_id = self.domain_id
            self.domain_id = None
        else:
            # This is revoking all tokens for a domain.
            self.domain_scope_id = None

        if self.expires_at is not None:
            # Trim off the expiration time because MySQL timestamps are only
            # accurate to the second.
            self.expires_at = self.expires_at.replace(microsecond=0)

        if self.revoked_at is None:
            self.revoked_at = timeutils.utcnow()
        if self.issued_before is None:
            self.issued_before = self.revoked_at

    def to_dict(self):
        """Serialize the event, omitting unset (wildcard) attributes.

        :returns: dict suitable for the OS-REVOKE events API; trust and
                  OAuth fields appear under their extension-prefixed keys.
        """
        keys = ['user_id',
                'role_id',
                'domain_id',
                'domain_scope_id',
                'project_id',
                'audit_id',
                'audit_chain_id',
                ]
        event = {key: self.__dict__[key] for key in keys
                 if self.__dict__[key] is not None}
        if self.trust_id is not None:
            event['OS-TRUST:trust_id'] = self.trust_id
        if self.consumer_id is not None:
            event['OS-OAUTH1:consumer_id'] = self.consumer_id
        # Bug fix: guard on access_token_id itself. The original re-tested
        # consumer_id here, which dropped the access token id whenever the
        # event had no consumer and emitted a None entry when it did.
        if self.access_token_id is not None:
            event['OS-OAUTH1:access_token_id'] = self.access_token_id
        if self.expires_at is not None:
            event['expires_at'] = timeutils.isotime(self.expires_at)
        if self.issued_before is not None:
            event['issued_before'] = timeutils.isotime(self.issued_before,
                                                       subsecond=True)
        return event

    def key_for_name(self, name):
        """Return the 'name=value' tree key; None is rendered as '*'."""
        return "%s=%s" % (name, getattr(self, name) or '*')
+
+
def attr_keys(event):
    # Ordered 'name=value' keys forming one path through the revocation
    # tree. NOTE: on Python 3 ``map`` is lazy; the callers in RevokeTree
    # consume it immediately, so laziness is harmless here.
    return map(event.key_for_name, _EVENT_NAMES)
+
+
class RevokeTree(object):
    """Fast Revocation Checking Tree Structure.

    The Tree is an index to quickly match tokens against events.
    Each internal node is a hashtable of 'key=value' combinations taken
    from revocation events (see ``attr_keys``); each leaf stores the
    latest 'issued_before' timestamp for the attribute chain above it.
    """

    def __init__(self, revoke_events=None):
        self.revoke_map = dict()
        self.add_events(revoke_events)

    def add_event(self, event):
        """Updates the tree based on a revocation event.

        Creates any necessary internal nodes in the tree corresponding to the
        fields of the revocation event. The leaf node will always be set to
        the latest 'issued_before' for events that are otherwise identical.

        :param: Event to add to the tree

        :returns: the event that was passed in.

        """
        revoke_map = self.revoke_map
        for key in attr_keys(event):
            revoke_map = revoke_map.setdefault(key, {})
        revoke_map['issued_before'] = max(
            event.issued_before, revoke_map.get(
                'issued_before', event.issued_before))
        return event

    def remove_event(self, event):
        """Update the tree based on the removal of a Revocation Event.

        Removes empty nodes from the tree from the leaf back to the root.

        If multiple events trace the same path, but have different
        'issued_before' values, only the last is ever stored in the tree.
        So only an exact match on 'issued_before' ever triggers a removal

        :param: Event to remove from the tree

        """
        stack = []
        revoke_map = self.revoke_map
        for name in _EVENT_NAMES:
            key = event.key_for_name(name)
            nxt = revoke_map.get(key)
            if nxt is None:
                break
            stack.append((revoke_map, key, nxt))
            revoke_map = nxt
        else:
            # Only reached when every level matched (no break above).
            if event.issued_before == revoke_map['issued_before']:
                revoke_map.pop('issued_before')
        # Prune now-empty nodes from the leaf back toward the root.
        for parent, key, child in reversed(stack):
            if not any(child):
                del parent[key]

    def add_events(self, revoke_events):
        """Add each event to the tree; return the list of added events.

        Bug fix: the original returned ``map(self.add_event, ...)``, which
        is a lazy iterator on Python 3 -- ``__init__`` discards the result,
        so no event was ever inserted. The list comprehension evaluates
        eagerly and preserves the Python 2 (list) return value.
        """
        return [self.add_event(event) for event in revoke_events or []]

    def is_revoked(self, token_data):
        """Check if a token matches the revocation event.

        Compare the values for each level of the tree with the values from
        the token, accounting for attributes that have alternative
        keys, and for wildcard matches.
        if there is a match, continue down the tree.
        if there is no match, exit early.

        token_data is a map based on a flattened view of token.
        The required fields are:

        'expires_at','user_id', 'project_id', 'identity_domain_id',
        'assignment_domain_id', 'trust_id', 'trustor_id', 'trustee_id'
        'consumer_id', 'access_token_id'

        """
        # Alternative names to be checked in token for every field in
        # revoke tree.
        alternatives = {
            'user_id': ['user_id', 'trustor_id', 'trustee_id'],
            'domain_id': ['identity_domain_id', 'assignment_domain_id'],
            # For a domain-scoped token, the domain is in
            # assignment_domain_id.
            'domain_scope_id': ['assignment_domain_id', ],
        }
        # Contains current forest (collection of trees) to be checked.
        partial_matches = [self.revoke_map]
        # We iterate over every layer of our revoke tree (except the last
        # one).
        for name in _EVENT_NAMES:
            # bundle is the set of partial matches for the next level down
            # the tree
            bundle = []
            wildcard = '%s=*' % (name,)
            # For every tree in current forest.
            for tree in partial_matches:
                # If there is wildcard node on current level we take it.
                bundle.append(tree.get(wildcard))
                if name == 'role_id':
                    # Roles are very special since a token has a list of
                    # them. If the revocation event matches any one of them,
                    # revoke the token.
                    for role_id in token_data.get('roles', []):
                        bundle.append(tree.get('role_id=%s' % role_id))
                else:
                    # For other fields we try to get any branch that concurs
                    # with any alternative field in the token.
                    for alt_name in alternatives.get(name, [name]):
                        bundle.append(
                            tree.get('%s=%s' % (name, token_data[alt_name])))
            # tree.get returns `None` if there is no match, so
            # `bundle.append` adds a 'None' entry. This call removes the
            # `None` entries.
            partial_matches = [x for x in bundle if x is not None]
            if not partial_matches:
                # If we end up with no branches to follow means that the
                # token is definitely not in the revoke tree and all further
                # iterations will be for nothing.
                return False

        # The last (leaf) level is checked in a special way because we verify
        # issued_at field differently.
        for leaf in partial_matches:
            try:
                if leaf['issued_before'] > token_data['issued_at']:
                    return True
            except KeyError:
                pass
        # If we made it out of the loop then no element in revocation tree
        # corresponds to our token and it is good.
        return False
+
+
def build_token_values_v2(access, default_domain_id):
    """Flatten a v2 token document into the dict used for revocation checks.

    :param access: the v2 token payload (the 'access' document).
    :param default_domain_id: applied to both identity and assignment
        scopes, since the v2 API predates domains.
    """
    token_data = access['token']

    expires_at = timeutils.parse_isotime(token_data['expires'])
    # Trim off the microseconds because the revocation event only has
    # expirations accurate to the second.
    expires_at = expires_at.replace(microsecond=0)

    audit_ids = token_data.get('audit_ids', [None])
    token_values = {
        'expires_at': timeutils.normalize_time(expires_at),
        'issued_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['issued_at'])),
        'audit_id': audit_ids[0],
        'audit_chain_id': audit_ids[-1],
        'user_id': access.get('user', {}).get('id'),
        # v2 has no domain concept; fall back to the default domain.
        'identity_domain_id': default_domain_id,
        'assignment_domain_id': default_domain_id,
        # OAuth credentials are v3-only.
        'consumer_id': None,
        'access_token_id': None,
    }

    project = token_data.get('tenant')
    if project is not None:
        token_values['project_id'] = project['id']
    else:
        token_values['project_id'] = None

    trust = token_data.get('trust')
    if trust is None:
        token_values['trust_id'] = None
        token_values['trustor_id'] = None
        token_values['trustee_id'] = None
    else:
        token_values['trust_id'] = trust['id']
        token_values['trustor_id'] = trust['trustor_id']
        token_values['trustee_id'] = trust['trustee_id']

    # Roles are by ID in metadata and by name in the user section
    token_values['roles'] = list(access.get('metadata', {}).get('roles', []))
    return token_values
+
+
def build_token_values(token_data):
    """Flatten a v3 token document into the dict used for revocation checks.

    :param token_data: the v3 token payload (the 'token' document).
    :returns: dict with the keys listed in RevokeTree.is_revoked.
    """
    token_expires_at = timeutils.parse_isotime(token_data['expires_at'])

    # Trim off the microseconds because the revocation event only has
    # expirations accurate to the second.
    token_expires_at = token_expires_at.replace(microsecond=0)

    token_values = {
        'expires_at': timeutils.normalize_time(token_expires_at),
        'issued_at': timeutils.normalize_time(
            timeutils.parse_isotime(token_data['issued_at'])),
        'audit_id': token_data.get('audit_ids', [None])[0],
        'audit_chain_id': token_data.get('audit_ids', [None])[-1],
    }

    user = token_data.get('user')
    if user is not None:
        token_values['user_id'] = user['id']
        # Federated users do not have a domain, be defensive and get the user
        # domain set to None in the federated user case.
        token_values['identity_domain_id'] = user.get('domain', {}).get('id')
    else:
        token_values['user_id'] = None
        token_values['identity_domain_id'] = None

    project = token_data.get('project', token_data.get('tenant'))
    if project is not None:
        token_values['project_id'] = project['id']
        token_values['assignment_domain_id'] = project['domain']['id']
    else:
        token_values['project_id'] = None

        # Fix: only consult 'domain' when there is no project. A standalone
        # domain-scoped token carries 'domain' instead of 'project'; checking
        # it unconditionally would clobber the assignment_domain_id derived
        # from the project above, so "revoke all tokens in domain X" would
        # miss project-scoped tokens in that domain.
        domain = token_data.get('domain')
        if domain is not None:
            token_values['assignment_domain_id'] = domain['id']
        else:
            token_values['assignment_domain_id'] = None

    role_list = []
    roles = token_data.get('roles')
    if roles is not None:
        for role in roles:
            role_list.append(role['id'])
    token_values['roles'] = role_list

    trust = token_data.get('OS-TRUST:trust')
    if trust is None:
        token_values['trust_id'] = None
        token_values['trustor_id'] = None
        token_values['trustee_id'] = None
    else:
        token_values['trust_id'] = trust['id']
        token_values['trustor_id'] = trust['trustor_user']['id']
        token_values['trustee_id'] = trust['trustee_user']['id']

    oauth1 = token_data.get('OS-OAUTH1')
    if oauth1 is None:
        token_values['consumer_id'] = None
        token_values['access_token_id'] = None
    else:
        token_values['consumer_id'] = oauth1['consumer_id']
        token_values['access_token_id'] = oauth1['access_token_id']
    return token_values
diff --git a/keystone-moon/keystone/contrib/revoke/routers.py b/keystone-moon/keystone/contrib/revoke/routers.py
new file mode 100644
index 00000000..4d2edfc0
--- /dev/null
+++ b/keystone-moon/keystone/contrib/revoke/routers.py
@@ -0,0 +1,29 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import json_home
+from keystone.common import wsgi
+from keystone.contrib.revoke import controllers
+
+
class RevokeExtension(wsgi.V3ExtensionRouter):
    """Registers the OS-REVOKE API route: GET /OS-REVOKE/events."""

    PATH_PREFIX = '/OS-REVOKE'

    def add_routes(self, mapper):
        revoke_controller = controllers.RevokeController()
        self._add_resource(
            mapper, revoke_controller,
            path=self.PATH_PREFIX + '/events',
            get_action='list_revoke_events',
            rel=json_home.build_v3_extension_resource_relation(
                'OS-REVOKE', '1.0', 'events'))
diff --git a/keystone-moon/keystone/contrib/s3/__init__.py b/keystone-moon/keystone/contrib/s3/__init__.py
new file mode 100644
index 00000000..eec77c72
--- /dev/null
+++ b/keystone-moon/keystone/contrib/s3/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.s3.core import * # noqa
diff --git a/keystone-moon/keystone/contrib/s3/core.py b/keystone-moon/keystone/contrib/s3/core.py
new file mode 100644
index 00000000..34095bf4
--- /dev/null
+++ b/keystone-moon/keystone/contrib/s3/core.py
@@ -0,0 +1,73 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Main entry point into the S3 Credentials service.
+
+This service provides S3 token validation for services configured with the
+s3_token middleware to authorize S3 requests.
+
+This service uses the same credentials used by EC2. Refer to the documentation
+for the EC2 module for how to generate the required credentials.
+"""
+
+import base64
+import hashlib
+import hmac
+
+from keystone.common import extension
+from keystone.common import json_home
+from keystone.common import utils
+from keystone.common import wsgi
+from keystone.contrib.ec2 import controllers
+from keystone import exception
+
# Metadata describing the S3 token extension; registered below so the
# extension is advertised by the admin API's extension listing.
EXTENSION_DATA = {
    'name': 'OpenStack S3 API',
    'namespace': 'http://docs.openstack.org/identity/api/ext/'
                 's3tokens/v1.0',
    'alias': 's3tokens',
    'updated': '2013-07-07T12:00:0-00:00',
    'description': 'OpenStack S3 API.',
    'links': [
        {
            'rel': 'describedby',
            # TODO(ayoung): needs a description
            'type': 'text/html',
            'href': 'https://github.com/openstack/identity-api',
        }
    ]}
extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
+
+
class S3Extension(wsgi.V3ExtensionRouter):
    """Registers the s3tokens validation route: POST /s3tokens."""

    def add_routes(self, mapper):
        controller = S3Controller()
        # validation
        self._add_resource(
            mapper, controller,
            path='/s3tokens',
            post_action='authenticate',
            rel=json_home.build_v3_extension_resource_relation(
                's3tokens', '1.0', 's3tokens'))
+
+
class S3Controller(controllers.Ec2Controller):
    """EC2-style credential validation using the S3 signing scheme."""

    def check_signature(self, creds_ref, credentials):
        """Recompute the S3 signature and compare it to the one submitted.

        :param creds_ref: stored credential holding the shared 'secret'
        :param credentials: request payload with 'token' (urlsafe-base64
            encoded string-to-sign) and the client-computed 'signature'
        :raises exception.Unauthorized: if the signatures do not match

        NOTE(review): the str() coercions assume Python 2 byte strings --
        confirm before running under Python 3.
        """
        string_to_sign = base64.urlsafe_b64decode(str(credentials['token']))
        secret = str(creds_ref['secret'])
        digest = hmac.new(secret, string_to_sign, hashlib.sha1).digest()
        expected = base64.encodestring(digest).strip()

        # auth_str_equal is the project's string-comparison helper
        # (presumably timing-safe -- see keystone.common.utils).
        if not utils.auth_str_equal(credentials['signature'], expected):
            raise exception.Unauthorized('Credential signature mismatch')
diff --git a/keystone-moon/keystone/contrib/simple_cert/__init__.py b/keystone-moon/keystone/contrib/simple_cert/__init__.py
new file mode 100644
index 00000000..b213192e
--- /dev/null
+++ b/keystone-moon/keystone/contrib/simple_cert/__init__.py
@@ -0,0 +1,14 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.simple_cert.core import * # noqa
+from keystone.contrib.simple_cert.routers import SimpleCertExtension # noqa
diff --git a/keystone-moon/keystone/contrib/simple_cert/controllers.py b/keystone-moon/keystone/contrib/simple_cert/controllers.py
new file mode 100644
index 00000000..d34c03a6
--- /dev/null
+++ b/keystone-moon/keystone/contrib/simple_cert/controllers.py
@@ -0,0 +1,42 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_config import cfg
+import webob
+
+from keystone.common import controller
+from keystone.common import dependency
+from keystone import exception
+
+CONF = cfg.CONF
+
+
@dependency.requires('token_provider_api')
class SimpleCert(controller.V3Controller):
    """Serves the deployment's PEM signing certificates verbatim."""

    def _get_certificate(self, name):
        """Read the PEM file at ``name`` and wrap it in a raw response.

        :raises exception.CertificateFilesUnavailable: if the file cannot
            be read.
        """
        try:
            with open(name, 'r') as cert_file:
                contents = cert_file.read()
        except IOError:
            raise exception.CertificateFilesUnavailable()

        # NOTE(jamielennox): We construct the webob Response ourselves here so
        # that we don't pass through the JSON encoding process.
        headers = [('Content-Type', 'application/x-pem-file')]
        return webob.Response(body=contents, headerlist=headers,
                              status="200 OK")

    def get_ca_certificate(self, context):
        # The CA certificate used to sign the token-signing certificate.
        return self._get_certificate(CONF.signing.ca_certs)

    def list_certificates(self, context):
        # The signing certificate itself.
        return self._get_certificate(CONF.signing.certfile)
diff --git a/keystone-moon/keystone/contrib/simple_cert/core.py b/keystone-moon/keystone/contrib/simple_cert/core.py
new file mode 100644
index 00000000..531c6aae
--- /dev/null
+++ b/keystone-moon/keystone/contrib/simple_cert/core.py
@@ -0,0 +1,32 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.common import extension
+
# Metadata describing the OS-SIMPLE-CERT extension; registered on both the
# admin and public pipelines so it appears in either extension listing.
EXTENSION_DATA = {
    'name': 'OpenStack Simple Certificate API',
    'namespace': 'http://docs.openstack.org/identity/api/ext/'
                 'OS-SIMPLE-CERT/v1.0',
    'alias': 'OS-SIMPLE-CERT',
    'updated': '2014-01-20T12:00:0-00:00',
    'description': 'OpenStack simple certificate retrieval extension',
    'links': [
        {
            'rel': 'describedby',
            # TODO(dolph): link needs to be revised after
            # bug 928059 merges
            'type': 'text/html',
            'href': 'https://github.com/openstack/identity-api',
        }
    ]}
extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
diff --git a/keystone-moon/keystone/contrib/simple_cert/routers.py b/keystone-moon/keystone/contrib/simple_cert/routers.py
new file mode 100644
index 00000000..8c36c2a4
--- /dev/null
+++ b/keystone-moon/keystone/contrib/simple_cert/routers.py
@@ -0,0 +1,41 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import functools
+
+from keystone.common import json_home
+from keystone.common import wsgi
+from keystone.contrib.simple_cert import controllers
+
+
# Partial that pins the extension name/version for json-home relations.
build_resource_relation = functools.partial(
    json_home.build_v3_extension_resource_relation,
    extension_name='OS-SIMPLE-CERT', extension_version='1.0')


class SimpleCertExtension(wsgi.V3ExtensionRouter):
    """Routes for fetching the CA and signing certificates as raw PEM."""

    PREFIX = 'OS-SIMPLE-CERT'

    def add_routes(self, mapper):
        controller = controllers.SimpleCert()

        self._add_resource(
            mapper, controller,
            path='/%s/ca' % self.PREFIX,
            get_action='get_ca_certificate',
            rel=build_resource_relation(resource_name='ca_certificate'))
        self._add_resource(
            mapper, controller,
            path='/%s/certificates' % self.PREFIX,
            get_action='list_certificates',
            rel=build_resource_relation(resource_name='certificates'))
diff --git a/keystone-moon/keystone/contrib/user_crud/__init__.py b/keystone-moon/keystone/contrib/user_crud/__init__.py
new file mode 100644
index 00000000..271ceee6
--- /dev/null
+++ b/keystone-moon/keystone/contrib/user_crud/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2012 Red Hat, Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.contrib.user_crud.core import * # noqa
diff --git a/keystone-moon/keystone/contrib/user_crud/core.py b/keystone-moon/keystone/contrib/user_crud/core.py
new file mode 100644
index 00000000..dd16d3a5
--- /dev/null
+++ b/keystone-moon/keystone/contrib/user_crud/core.py
@@ -0,0 +1,134 @@
+# Copyright 2012 Red Hat, Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+from oslo_log import log
+
+from keystone.common import dependency
+from keystone.common import extension
+from keystone.common import wsgi
+from keystone import exception
+from keystone import identity
+from keystone.models import token_model
+
+
+LOG = log.getLogger(__name__)
+
+
# Module-import side effect: advertise OS-KSCRUD in the public extension
# registry so it appears in the v2.0 API's extension listing.
extension.register_public_extension(
    'OS-KSCRUD', {
        'name': 'OpenStack Keystone User CRUD',
        'namespace': 'http://docs.openstack.org/identity/api/ext/'
                     'OS-KSCRUD/v1.0',
        'alias': 'OS-KSCRUD',
        'updated': '2013-07-07T12:00:0-00:00',
        'description': 'OpenStack extensions to Keystone v2.0 API '
                       'enabling User Operations.',
        'links': [
            {
                'rel': 'describedby',
                # TODO(ayoung): needs a description
                'type': 'text/html',
                'href': 'https://github.com/openstack/identity-api',
            }
        ]})
+
+
@dependency.requires('catalog_api', 'identity_api', 'resource_api',
                     'token_provider_api')
class UserController(identity.controllers.User):
    """v2.0 self-service user controller for the OS-KSCRUD extension."""

    def set_user_password(self, context, user_id, user):
        """Change a user's own password and return a fresh v2.0 token.

        The caller must present a token belonging to *user_id* and supply
        the current password in ``user['original_password']``; the new
        password is taken from ``user['password']``.

        :param context: request context; ``token_id`` is read from it.
        :param user_id: ID of the user whose password is being changed.
        :param user: dict with ``original_password`` and ``password`` keys.
        :returns: token data for a newly issued v2.0 token.
        :raises keystone.exception.Forbidden: token belongs to another user.
        :raises keystone.exception.ValidationError: original password missing.
        :raises keystone.exception.Unauthorized: authentication with the
            original password failed, or the user is disabled.
        """
        token_id = context.get('token_id')
        original_password = user.get('original_password')

        # Validate the presented token and wrap it in the token model so
        # its user/project/role attributes can be read uniformly below.
        token_data = self.token_provider_api.validate_token(token_id)
        token_ref = token_model.KeystoneToken(token_id=token_id,
                                              token_data=token_data)

        # Only the token's owner may change this password.
        if token_ref.user_id != user_id:
            raise exception.Forbidden('Token belongs to another user')
        if original_password is None:
            raise exception.ValidationError(target='user',
                                            attribute='original password')

        try:
            # Re-authenticate with the current password before allowing
            # the change; AssertionError is what the identity backend
            # raises on a bad password.
            user_ref = self.identity_api.authenticate(
                context,
                user_id=token_ref.user_id,
                password=original_password)
            if not user_ref.get('enabled', True):
                # NOTE(dolph): why can't you set a disabled user's password?
                raise exception.Unauthorized('User is disabled')
        except AssertionError:
            raise exception.Unauthorized()

        update_dict = {'password': user['password'], 'id': user_id}

        # The parent controller's update requires admin rights; elevate a
        # copy of the context rather than mutating the caller's context.
        admin_context = copy.copy(context)
        admin_context['is_admin'] = True
        super(UserController, self).set_user_password(admin_context,
                                                      user_id,
                                                      update_dict)

        # Issue a new token based upon the original token data. This will
        # always be a V2.0 token.

        # TODO(morganfainberg): Add a mechanism to issue a new token directly
        # from a token model so that this code can go away. This is likely
        # not the norm as most cases do not need to yank apart a token to
        # issue a new one.
        new_token_ref = {}
        metadata_ref = {}
        roles_ref = None

        # Rebuild the legacy token-ref dict shape expected by
        # issue_v2_token from the validated token's attributes.
        new_token_ref['user'] = user_ref
        if token_ref.bind:
            new_token_ref['bind'] = token_ref.bind
        if token_ref.project_id:
            new_token_ref['tenant'] = self.resource_api.get_project(
                token_ref.project_id)
        if token_ref.role_names:
            roles_ref = [dict(name=value)
                         for value in token_ref.role_names]
        if token_ref.role_ids:
            metadata_ref['roles'] = token_ref.role_ids
        if token_ref.trust_id:
            metadata_ref['trust'] = {
                'id': token_ref.trust_id,
                'trustee_user_id': token_ref.trustee_user_id}
        new_token_ref['metadata'] = metadata_ref
        new_token_ref['id'] = uuid.uuid4().hex

        # NOTE(review): project_id may be None for an unscoped token —
        # presumably get_catalog tolerates that; confirm against the
        # catalog driver.
        catalog_ref = self.catalog_api.get_catalog(user_id,
                                                   token_ref.project_id)

        new_token_id, new_token_data = self.token_provider_api.issue_v2_token(
            token_ref=new_token_ref, roles_ref=roles_ref,
            catalog_ref=catalog_ref)
        LOG.debug('TOKEN_REF %s', new_token_data)
        return new_token_data
+
+
class CrudExtension(wsgi.ExtensionRouter):
    """Provides a subset of CRUD operations for internal data types."""

    def add_routes(self, mapper):
        """Wire the v2.0 self-service password-change endpoint."""
        mapper.connect(
            '/OS-KSCRUD/users/{user_id}',
            controller=UserController(),
            action='set_user_password',
            conditions=dict(method=['PATCH']))