aboutsummaryrefslogtreecommitdiffstats
path: root/keystone-moon/keystone/tests/unit
diff options
context:
space:
mode:
Diffstat (limited to 'keystone-moon/keystone/tests/unit')
-rw-r--r--keystone-moon/keystone/tests/unit/__init__.py42
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py112
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/test_backends.py3755
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/test_core.py123
-rw-r--r--keystone-moon/keystone/tests/unit/auth/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/auth/test_controllers.py98
-rw-r--r--keystone-moon/keystone/tests/unit/backend/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/core_ldap.py146
-rw-r--r--keystone-moon/keystone/tests/unit/backend/core_sql.py53
-rw-r--r--keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/domain_config/core.py601
-rw-r--r--keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py41
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py39
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py108
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py71
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py30
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/role/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/role/core.py130
-rw-r--r--keystone-moon/keystone/tests/unit/backend/role/test_ldap.py161
-rw-r--r--keystone-moon/keystone/tests/unit/backend/role/test_sql.py40
-rw-r--r--keystone-moon/keystone/tests/unit/catalog/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/catalog/test_backends.py588
-rw-r--r--keystone-moon/keystone/tests/unit/catalog/test_core.py100
-rw-r--r--keystone-moon/keystone/tests/unit/common/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_authorization.py161
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_base64utils.py208
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_connection_pool.py135
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_injection.py238
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_json_home.py91
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_ldap.py584
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_manager.py40
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_notifications.py1248
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_pemutils.py337
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_sql_core.py52
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_utils.py210
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_db2.conf4
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf5
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf41
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf14
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf10
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf9
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf4
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf32
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf4
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_sql.conf8
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf14
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/deprecated.conf8
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf15
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf5
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf14
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf12
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf13
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf5
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf14
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf5
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf4
-rw-r--r--keystone-moon/keystone/tests/unit/contrib/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/contrib/federation/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py725
-rw-r--r--keystone-moon/keystone/tests/unit/core.py907
-rw-r--r--keystone-moon/keystone/tests/unit/default_catalog.templates14
-rw-r--r--keystone-moon/keystone/tests/unit/default_fixtures.py154
-rw-r--r--keystone-moon/keystone/tests/unit/external/README.rst9
-rw-r--r--keystone-moon/keystone/tests/unit/external/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/external/test_timeutils.py33
-rw-r--r--keystone-moon/keystone/tests/unit/fakeldap.py664
-rw-r--r--keystone-moon/keystone/tests/unit/federation_fixtures.py28
-rw-r--r--keystone-moon/keystone/tests/unit/filtering.py124
-rw-r--r--keystone-moon/keystone/tests/unit/identity/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/identity/test_backends.py1297
-rw-r--r--keystone-moon/keystone/tests/unit/identity/test_controllers.py65
-rw-r--r--keystone-moon/keystone/tests/unit/identity/test_core.py176
-rw-r--r--keystone-moon/keystone/tests/unit/identity_mapping.py22
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/__init__.py17
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/appserver.py79
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py34
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/cache.py43
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/database.py158
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/hacking.py417
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py30
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py35
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/policy.py33
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py29
-rw-r--r--keystone-moon/keystone/tests/unit/mapping_fixtures.py1486
-rw-r--r--keystone-moon/keystone/tests/unit/policy/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/policy/test_backends.py86
-rw-r--r--keystone-moon/keystone/tests/unit/resource/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/resource/backends/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/resource/backends/test_sql.py24
-rw-r--r--keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py53
-rw-r--r--keystone-moon/keystone/tests/unit/resource/test_backends.py1669
-rw-r--r--keystone-moon/keystone/tests/unit/resource/test_controllers.py57
-rw-r--r--keystone-moon/keystone/tests/unit/resource/test_core.py692
-rw-r--r--keystone-moon/keystone/tests/unit/rest.py261
-rw-r--r--keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml25
-rw-r--r--keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml69
-rw-r--r--keystone-moon/keystone/tests/unit/schema/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/schema/v2.py161
-rw-r--r--keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py1391
-rw-r--r--keystone-moon/keystone/tests/unit/test_auth.py1446
-rw-r--r--keystone-moon/keystone/tests/unit/test_auth_plugin.py190
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend.py6851
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py249
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py38
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_federation_sql.py51
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py198
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_kvs.py113
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_ldap.py3287
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py243
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_rules.py63
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_sql.py1025
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_templated.py261
-rw-r--r--keystone-moon/keystone/tests/unit/test_cache.py324
-rw-r--r--keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py728
-rw-r--r--keystone-moon/keystone/tests/unit/test_catalog.py355
-rw-r--r--keystone-moon/keystone/tests/unit/test_cert_setup.py243
-rw-r--r--keystone-moon/keystone/tests/unit/test_cli.py478
-rw-r--r--keystone-moon/keystone/tests/unit/test_config.py82
-rw-r--r--keystone-moon/keystone/tests/unit/test_contrib_ec2.py208
-rw-r--r--keystone-moon/keystone/tests/unit/test_contrib_s3_core.py103
-rw-r--r--keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py57
-rw-r--r--keystone-moon/keystone/tests/unit/test_credential.py265
-rw-r--r--keystone-moon/keystone/tests/unit/test_driver_hints.py60
-rw-r--r--keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py34
-rw-r--r--keystone-moon/keystone/tests/unit/test_entry_points.py48
-rw-r--r--keystone-moon/keystone/tests/unit/test_exception.py273
-rw-r--r--keystone-moon/keystone/tests/unit/test_hacking_checks.py143
-rw-r--r--keystone-moon/keystone/tests/unit/test_ipv6.py51
-rw-r--r--keystone-moon/keystone/tests/unit/test_kvs.py586
-rw-r--r--keystone-moon/keystone/tests/unit/test_ldap_livetest.py217
-rw-r--r--keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py202
-rw-r--r--keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py119
-rw-r--r--keystone-moon/keystone/tests/unit/test_middleware.py764
-rw-r--r--keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py60
-rw-r--r--keystone-moon/keystone/tests/unit/test_policy.py222
-rw-r--r--keystone-moon/keystone/tests/unit/test_revoke.py622
-rw-r--r--keystone-moon/keystone/tests/unit/test_singular_plural.py48
-rw-r--r--keystone-moon/keystone/tests/unit/test_sql_livetest.py49
-rw-r--r--keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py112
-rw-r--r--keystone-moon/keystone/tests/unit/test_sql_upgrade.py1195
-rw-r--r--keystone-moon/keystone/tests/unit/test_ssl.py186
-rw-r--r--keystone-moon/keystone/tests/unit/test_token_bind.py198
-rw-r--r--keystone-moon/keystone/tests/unit/test_token_provider.py845
-rw-r--r--keystone-moon/keystone/tests/unit/test_url_middleware.py54
-rw-r--r--keystone-moon/keystone/tests/unit/test_v2.py1590
-rw-r--r--keystone-moon/keystone/tests/unit/test_v2_controller.py186
-rw-r--r--keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py1376
-rw-r--r--keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py344
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3.py1640
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_assignment.py2871
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_auth.py4955
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_catalog.py924
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_controller.py53
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_credential.py478
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_domain_config.py459
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py246
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_federation.py3722
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_filters.py435
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_identity.py795
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_oauth1.py907
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_os_revoke.py136
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_policy.py63
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_protection.py1777
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_resource.py1434
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_trust.py403
-rw-r--r--keystone-moon/keystone/tests/unit/test_validation.py2115
-rw-r--r--keystone-moon/keystone/tests/unit/test_versions.py1065
-rw-r--r--keystone-moon/keystone/tests/unit/test_wsgi.py586
-rw-r--r--keystone-moon/keystone/tests/unit/tests/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/tests/test_core.py53
-rw-r--r--keystone-moon/keystone/tests/unit/tests/test_utils.py37
-rw-r--r--keystone-moon/keystone/tests/unit/token/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_backends.py551
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_fernet_provider.py611
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_pki_provider.py26
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py26
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_provider.py30
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_token_data_helper.py56
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_token_model.py263
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_uuid_provider.py26
-rw-r--r--keystone-moon/keystone/tests/unit/trust/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/trust/test_backends.py172
-rw-r--r--keystone-moon/keystone/tests/unit/utils.py85
195 files changed, 0 insertions, 74291 deletions
diff --git a/keystone-moon/keystone/tests/unit/__init__.py b/keystone-moon/keystone/tests/unit/__init__.py
deleted file mode 100644
index 0e92ca65..00000000
--- a/keystone-moon/keystone/tests/unit/__init__.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import oslo_i18n
-import six
-
-
-if six.PY3:
- # NOTE(dstanek): This block will monkey patch libraries that are not
- # yet supported in Python3. We do this that that it is possible to
- # execute any tests at all. Without monkey patching modules the
- # tests will fail with import errors.
-
- import sys
- from unittest import mock # noqa: our import detection is naive?
-
- sys.modules['ldap'] = mock.Mock()
- sys.modules['ldap.controls'] = mock.Mock()
- sys.modules['ldap.dn'] = mock.Mock()
- sys.modules['ldap.filter'] = mock.Mock()
- sys.modules['ldap.modlist'] = mock.Mock()
- sys.modules['ldappool'] = mock.Mock()
-
-
-# NOTE(dstanek): oslo_i18n.enable_lazy() must be called before
-# keystone.i18n._() is called to ensure it has the desired lazy lookup
-# behavior. This includes cases, like keystone.exceptions, where
-# keystone.i18n._() is called at import time.
-oslo_i18n.enable_lazy()
-
-from keystone.tests.unit.core import * # noqa
diff --git a/keystone-moon/keystone/tests/unit/assignment/__init__.py b/keystone-moon/keystone/tests/unit/assignment/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/assignment/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py b/keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py b/keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py
deleted file mode 100644
index 37e2d924..00000000
--- a/keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from keystone.common import sql
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit.assignment import test_core
-from keystone.tests.unit.backend import core_sql
-
-
-class SqlRoleModels(core_sql.BaseBackendSqlModels):
-
- def test_role_model(self):
- cols = (('id', sql.String, 64),
- ('name', sql.String, 255),
- ('domain_id', sql.String, 64))
- self.assertExpectedSchema('role', cols)
-
-
-class SqlRole(core_sql.BaseBackendSqlTests, test_core.RoleTests):
-
- def test_create_null_role_name(self):
- role = unit.new_role_ref(name=None)
- self.assertRaises(exception.UnexpectedError,
- self.role_api.create_role,
- role['id'],
- role)
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- role['id'])
-
- def test_create_duplicate_role_domain_specific_name_fails(self):
- domain = unit.new_domain_ref()
- role1 = unit.new_role_ref(domain_id=domain['id'])
- self.role_api.create_role(role1['id'], role1)
- role2 = unit.new_role_ref(name=role1['name'],
- domain_id=domain['id'])
- self.assertRaises(exception.Conflict,
- self.role_api.create_role,
- role2['id'],
- role2)
-
- def test_update_domain_id_of_role_fails(self):
- # Create a global role
- role1 = unit.new_role_ref()
- role1 = self.role_api.create_role(role1['id'], role1)
- # Try and update it to be domain specific
- domainA = unit.new_domain_ref()
- role1['domain_id'] = domainA['id']
- self.assertRaises(exception.ValidationError,
- self.role_api.update_role,
- role1['id'],
- role1)
-
- # Create a domain specific role from scratch
- role2 = unit.new_role_ref(domain_id=domainA['id'])
- self.role_api.create_role(role2['id'], role2)
- # Try to "move" it to another domain
- domainB = unit.new_domain_ref()
- role2['domain_id'] = domainB['id']
- self.assertRaises(exception.ValidationError,
- self.role_api.update_role,
- role2['id'],
- role2)
- # Now try to make it global
- role2['domain_id'] = None
- self.assertRaises(exception.ValidationError,
- self.role_api.update_role,
- role2['id'],
- role2)
-
- def test_domain_specific_separation(self):
- domain1 = unit.new_domain_ref()
- role1 = unit.new_role_ref(domain_id=domain1['id'])
- role_ref1 = self.role_api.create_role(role1['id'], role1)
- self.assertDictEqual(role1, role_ref1)
- # Check we can have the same named role in a different domain
- domain2 = unit.new_domain_ref()
- role2 = unit.new_role_ref(name=role1['name'], domain_id=domain2['id'])
- role_ref2 = self.role_api.create_role(role2['id'], role2)
- self.assertDictEqual(role2, role_ref2)
- # ...and in fact that you can have the same named role as a global role
- role3 = unit.new_role_ref(name=role1['name'])
- role_ref3 = self.role_api.create_role(role3['id'], role3)
- self.assertDictEqual(role3, role_ref3)
- # Check that updating one doesn't change the others
- role1['name'] = uuid.uuid4().hex
- self.role_api.update_role(role1['id'], role1)
- role_ref1 = self.role_api.get_role(role1['id'])
- self.assertDictEqual(role1, role_ref1)
- role_ref2 = self.role_api.get_role(role2['id'])
- self.assertDictEqual(role2, role_ref2)
- role_ref3 = self.role_api.get_role(role3['id'])
- self.assertDictEqual(role3, role_ref3)
- # Check that deleting one of these, doesn't affect the others
- self.role_api.delete_role(role1['id'])
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- role1['id'])
- self.role_api.get_role(role2['id'])
- self.role_api.get_role(role3['id'])
diff --git a/keystone-moon/keystone/tests/unit/assignment/test_backends.py b/keystone-moon/keystone/tests/unit/assignment/test_backends.py
deleted file mode 100644
index eb40e569..00000000
--- a/keystone-moon/keystone/tests/unit/assignment/test_backends.py
+++ /dev/null
@@ -1,3755 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-import mock
-from oslo_config import cfg
-from six.moves import range
-from testtools import matchers
-
-from keystone import exception
-from keystone.tests import unit
-
-
-CONF = cfg.CONF
-
-
-class AssignmentTestHelperMixin(object):
- """Mixin class to aid testing of assignments.
-
- This class supports data driven test plans that enable:
-
- - Creation of initial entities, such as domains, users, groups, projects
- and roles
- - Creation of assignments referencing the above entities
- - A set of input parameters and expected outputs to list_role_assignments
- based on the above test data
-
- A test plan is a dict of the form:
-
- test_plan = {
- entities: details and number of entities,
- group_memberships: group-user entity memberships,
- assignments: list of assignments to create,
- tests: list of pairs of input params and expected outputs}
-
- An example test plan:
-
- test_plan = {
- # First, create the entities required. Entities are specified by
- # a dict with the key being the entity type and the value an
- # entity specification which can be one of:
- #
- # - a simple number, e.g. {'users': 3} creates 3 users
- # - a dict where more information regarding the contents of the entity
- # is required, e.g. {'domains' : {'users : 3}} creates a domain
- # with three users
- # - a list of entity specifications if multiple are required
- #
- # The following creates a domain that contains a single user, group and
- # project, as well as creating three roles.
-
- 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1},
- 'roles': 3},
-
- # If it is required that an existing domain be used for the new
- # entities, then the id of that domain can be included in the
- # domain dict. For example, if alternatively we wanted to add 3 users
- # to the default domain, add a second domain containing 3 projects as
- # well as 5 additional empty domains, the entities would be defined as:
- #
- # 'entities': {'domains': [{'id': DEFAULT_DOMAIN, 'users': 3},
- # {'projects': 3}, 5]},
- #
- # A project hierarchy can be specified within the 'projects' section by
- # nesting the 'project' key, for example to create a project with three
- # sub-projects you would use:
-
- 'projects': {'project': 3}
-
- # A more complex hierarchy can also be defined, for example the
- # following would define three projects each containing a
- # sub-project, each of which contain a further three sub-projects.
-
- 'projects': [{'project': {'project': 3}},
- {'project': {'project': 3}},
- {'project': {'project': 3}}]
-
- # If the 'roles' entity count is defined as top level key in 'entities'
- # dict then these are global roles. If it is placed within the
- # 'domain' dict, then they will be domain specific roles. A mix of
- # domain specific and global roles are allowed, with the role index
- # being calculated in the order they are defined in the 'entities'
- # dict.
-
- # A set of implied role specifications. In this case, prior role
- # index 0 implies role index 1, and role 1 implies roles 2 and 3.
-
- 'roles': [{'role': 0, 'implied_roles': [1]},
- {'role': 1, 'implied_roles': [2, 3]}]
-
- # A list of groups and their members. In this case make users with
- # index 0 and 1 members of group with index 0. Users and Groups are
- # indexed in the order they appear in the 'entities' key above.
-
- 'group_memberships': [{'group': 0, 'users': [0, 1]}]
-
- # Next, create assignments between the entities, referencing the
- # entities by index, i.e. 'user': 0 refers to user[0]. Entities are
- # indexed in the order they appear in the 'entities' key above within
- # their entity type.
-
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'project': 0},
- {'group': 0, 'role': 2, 'domain': 0},
- {'user': 0, 'role': 2, 'project': 0}],
-
- # Finally, define an array of tests where list_role_assignment() is
- # called with the given input parameters and the results are then
- # confirmed to be as given in 'results'. Again, all entities are
- # referenced by index.
-
- 'tests': [
- {'params': {},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'project': 0},
- {'group': 0, 'role': 2, 'domain': 0},
- {'user': 0, 'role': 2, 'project': 0}]},
- {'params': {'role': 2},
- 'results': [{'group': 0, 'role': 2, 'domain': 0},
- {'user': 0, 'role': 2, 'project': 0}]}]
-
- # The 'params' key also supports the 'effective',
- # 'inherited_to_projects' and 'source_from_group_ids' options to
- # list_role_assignments.}
-
- """
-
- def _handle_project_spec(self, test_data, domain_id, project_spec,
- parent_id=None):
- """Handle the creation of a project or hierarchy of projects.
-
- project_spec may either be a count of the number of projects to
- create, or it may be a list of the form:
-
- [{'project': project_spec}, {'project': project_spec}, ...]
-
- This method is called recursively to handle the creation of a
- hierarchy of projects.
-
- """
- def _create_project(domain_id, parent_id):
- new_project = unit.new_project_ref(domain_id=domain_id,
- parent_id=parent_id)
- new_project = self.resource_api.create_project(new_project['id'],
- new_project)
- return new_project
-
- if isinstance(project_spec, list):
- for this_spec in project_spec:
- self._handle_project_spec(
- test_data, domain_id, this_spec, parent_id=parent_id)
- elif isinstance(project_spec, dict):
- new_proj = _create_project(domain_id, parent_id)
- test_data['projects'].append(new_proj)
- self._handle_project_spec(
- test_data, domain_id, project_spec['project'],
- parent_id=new_proj['id'])
- else:
- for _ in range(project_spec):
- test_data['projects'].append(
- _create_project(domain_id, parent_id))
-
- def _create_role(self, domain_id=None):
- new_role = unit.new_role_ref(domain_id=domain_id)
- return self.role_api.create_role(new_role['id'], new_role)
-
- def _handle_domain_spec(self, test_data, domain_spec):
- """Handle the creation of domains and their contents.
-
- domain_spec may either be a count of the number of empty domains to
- create, a dict describing the domain contents, or a list of
- domain_specs.
-
- In the case when a list is provided, this method calls itself
- recursively to handle the list elements.
-
- This method will insert any entities created into test_data
-
- """
- def _create_domain(domain_id=None):
- if domain_id is None:
- new_domain = unit.new_domain_ref()
- self.resource_api.create_domain(new_domain['id'],
- new_domain)
- return new_domain
- else:
- # The test plan specified an existing domain to use
- return self.resource_api.get_domain(domain_id)
-
- def _create_entity_in_domain(entity_type, domain_id):
- """Create a user or group entity in the domain."""
- if entity_type == 'users':
- new_entity = unit.new_user_ref(domain_id=domain_id)
- new_entity = self.identity_api.create_user(new_entity)
- elif entity_type == 'groups':
- new_entity = unit.new_group_ref(domain_id=domain_id)
- new_entity = self.identity_api.create_group(new_entity)
- elif entity_type == 'roles':
- new_entity = self._create_role(domain_id=domain_id)
- else:
- # Must be a bad test plan
- raise exception.NotImplemented()
- return new_entity
-
- if isinstance(domain_spec, list):
- for x in domain_spec:
- self._handle_domain_spec(test_data, x)
- elif isinstance(domain_spec, dict):
- # If there is a domain ID specified, then use it
- the_domain = _create_domain(domain_spec.get('id'))
- test_data['domains'].append(the_domain)
- for entity_type, value in domain_spec.items():
- if entity_type == 'id':
- # We already used this above to determine whether to
- # use and existing domain
- continue
- if entity_type == 'projects':
- # If it's projects, we need to handle the potential
- # specification of a project hierarchy
- self._handle_project_spec(
- test_data, the_domain['id'], value)
- else:
- # It's a count of number of entities
- for _ in range(value):
- test_data[entity_type].append(
- _create_entity_in_domain(
- entity_type, the_domain['id']))
- else:
- for _ in range(domain_spec):
- test_data['domains'].append(_create_domain())
-
- def create_entities(self, entity_pattern):
- """Create the entities specified in the test plan.
-
- Process the 'entities' key in the test plan, creating the requested
- entities. Each created entity will be added to the array of entities
- stored in the returned test_data object, e.g.:
-
- test_data['users'] = [user[0], user[1]....]
-
- """
- test_data = {}
- for entity in ['users', 'groups', 'domains', 'projects', 'roles']:
- test_data[entity] = []
-
- # Create any domains requested and, if specified, any entities within
- # those domains
- if 'domains' in entity_pattern:
- self._handle_domain_spec(test_data, entity_pattern['domains'])
-
- # Create any roles requested
- if 'roles' in entity_pattern:
- for _ in range(entity_pattern['roles']):
- test_data['roles'].append(self._create_role())
-
- return test_data
-
- def _convert_entity_shorthand(self, key, shorthand_data, reference_data):
- """Convert a shorthand entity description into a full ID reference.
-
- In test plan definitions, we allow a shorthand for referencing to an
- entity of the form:
-
- 'user': 0
-
- which is actually shorthand for:
-
- 'user_id': reference_data['users'][0]['id']
-
- This method converts the shorthand version into the full reference.
-
- """
- expanded_key = '%s_id' % key
- reference_index = '%ss' % key
- index_value = (
- reference_data[reference_index][shorthand_data[key]]['id'])
- return expanded_key, index_value
-
- def create_implied_roles(self, implied_pattern, test_data):
- """Create the implied roles specified in the test plan."""
- for implied_spec in implied_pattern:
- # Each implied role specification is a dict of the form:
- #
- # {'role': 0, 'implied_roles': list of roles}
-
- prior_role = test_data['roles'][implied_spec['role']]['id']
- if isinstance(implied_spec['implied_roles'], list):
- for this_role in implied_spec['implied_roles']:
- implied_role = test_data['roles'][this_role]['id']
- self.role_api.create_implied_role(prior_role, implied_role)
- else:
- implied_role = (
- test_data['roles'][implied_spec['implied_roles']]['id'])
- self.role_api.create_implied_role(prior_role, implied_role)
-
- def create_group_memberships(self, group_pattern, test_data):
- """Create the group memberships specified in the test plan."""
- for group_spec in group_pattern:
- # Each membership specification is a dict of the form:
- #
- # {'group': 0, 'users': [list of user indexes]}
- #
- # Add all users in the list to the specified group, first
- # converting from index to full entity ID.
- group_value = test_data['groups'][group_spec['group']]['id']
- for user_index in group_spec['users']:
- user_value = test_data['users'][user_index]['id']
- self.identity_api.add_user_to_group(user_value, group_value)
- return test_data
-
- def create_assignments(self, assignment_pattern, test_data):
- """Create the assignments specified in the test plan."""
- # First store how many assignments are already in the system,
- # so during the tests we can check the number of new assignments
- # created.
- test_data['initial_assignment_count'] = (
- len(self.assignment_api.list_role_assignments()))
-
- # Now create the new assignments in the test plan
- for assignment in assignment_pattern:
- # Each assignment is a dict of the form:
- #
- # { 'user': 0, 'project':1, 'role': 6}
- #
- # where the value of each item is the index into the array of
- # entities created earlier.
- #
- # We process the assignment dict to create the args required to
- # make the create_grant() call.
- args = {}
- for param in assignment:
- if param == 'inherited_to_projects':
- args[param] = assignment[param]
- else:
- # Turn 'entity : 0' into 'entity_id = ac6736ba873d'
- # where entity in user, group, project or domain
- key, value = self._convert_entity_shorthand(
- param, assignment, test_data)
- args[key] = value
- self.assignment_api.create_grant(**args)
- return test_data
-
    def execute_assignment_cases(self, test_plan, test_data):
        """Execute the test plan, based on the created test_data."""
        def check_results(expected, actual, param_arg_count):
            # Compare the assignments returned by the API against the test
            # plan's expectations, resolving shorthand entity indexes.
            if param_arg_count == 0:
                # It was an unfiltered call, so default fixture assignments
                # might be polluting our answer - so we take into account
                # how many assignments there were before the test.
                self.assertEqual(
                    len(expected) + test_data['initial_assignment_count'],
                    len(actual))
            else:
                self.assertThat(actual, matchers.HasLength(len(expected)))

            for each_expected in expected:
                expected_assignment = {}
                for param in each_expected:
                    if param == 'inherited_to_projects':
                        # Flag value - passed through unchanged.
                        expected_assignment[param] = each_expected[param]
                    elif param == 'indirect':
                        # We're expecting the result to contain an indirect
                        # dict with the details how the role came to be placed
                        # on this entity - so convert the key/value pairs of
                        # that dict into real entity references.
                        indirect_term = {}
                        for indirect_param in each_expected[param]:
                            key, value = self._convert_entity_shorthand(
                                indirect_param, each_expected[param],
                                test_data)
                            indirect_term[key] = value
                        expected_assignment[param] = indirect_term
                    else:
                        # Convert a simple shorthand entry into a full
                        # entity reference
                        key, value = self._convert_entity_shorthand(
                            param, each_expected, test_data)
                        expected_assignment[key] = value
                self.assertIn(expected_assignment, actual)

        def convert_group_ids_sourced_from_list(index_list, reference_data):
            # Map a list of group indexes onto their real group IDs.
            value_list = []
            for group_index in index_list:
                value_list.append(
                    reference_data['groups'][group_index]['id'])
            return value_list

        # Go through each test in the array, processing the input params, which
        # we build into an args dict, and then call list_role_assignments. Then
        # check the results against those specified in the test plan.
        for test in test_plan.get('tests', []):
            args = {}
            for param in test['params']:
                if param in ['effective', 'inherited', 'include_subtree']:
                    # Just pass the value into the args
                    args[param] = test['params'][param]
                elif param == 'source_from_group_ids':
                    # Convert the list of indexes into a list of IDs
                    args[param] = convert_group_ids_sourced_from_list(
                        test['params']['source_from_group_ids'], test_data)
                else:
                    # Turn 'entity : 0' into 'entity_id = ac6736ba873d'
                    # where entity in user, group, project or domain
                    key, value = self._convert_entity_shorthand(
                        param, test['params'], test_data)
                    args[key] = value
            results = self.assignment_api.list_role_assignments(**args)
            check_results(test['results'], results, len(args))
-
- def execute_assignment_plan(self, test_plan):
- """Create entities, assignments and execute the test plan.
-
- The standard method to call to create entities and assignments and
- execute the tests as specified in the test_plan. The test_data
- dict is returned so that, if required, the caller can execute
- additional manual tests with the entities and assignments created.
-
- """
- test_data = self.create_entities(test_plan['entities'])
- if 'implied_roles' in test_plan:
- self.create_implied_roles(test_plan['implied_roles'], test_data)
- if 'group_memberships' in test_plan:
- self.create_group_memberships(test_plan['group_memberships'],
- test_data)
- if 'assignments' in test_plan:
- test_data = self.create_assignments(test_plan['assignments'],
- test_data)
- self.execute_assignment_cases(test_plan, test_data)
- return test_data
-
-
-class AssignmentTests(AssignmentTestHelperMixin):
-
- def _get_domain_fixture(self):
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- return domain
-
- def test_project_add_and_remove_user_role(self):
- user_ids = self.assignment_api.list_user_ids_for_project(
- self.tenant_bar['id'])
- self.assertNotIn(self.user_two['id'], user_ids)
-
- self.assignment_api.add_role_to_user_and_project(
- tenant_id=self.tenant_bar['id'],
- user_id=self.user_two['id'],
- role_id=self.role_other['id'])
- user_ids = self.assignment_api.list_user_ids_for_project(
- self.tenant_bar['id'])
- self.assertIn(self.user_two['id'], user_ids)
-
- self.assignment_api.remove_role_from_user_and_project(
- tenant_id=self.tenant_bar['id'],
- user_id=self.user_two['id'],
- role_id=self.role_other['id'])
-
- user_ids = self.assignment_api.list_user_ids_for_project(
- self.tenant_bar['id'])
- self.assertNotIn(self.user_two['id'], user_ids)
-
    def test_remove_user_role_not_assigned(self):
        """Removing a never-assigned role raises RoleNotFound."""
        # Expect failure if attempt to remove a role that was never assigned to
        # the user.
        self.assertRaises(exception.RoleNotFound,
                          self.assignment_api.
                          remove_role_from_user_and_project,
                          tenant_id=self.tenant_bar['id'],
                          user_id=self.user_two['id'],
                          role_id=self.role_other['id'])
-
- def test_list_user_ids_for_project(self):
- user_ids = self.assignment_api.list_user_ids_for_project(
- self.tenant_baz['id'])
- self.assertEqual(2, len(user_ids))
- self.assertIn(self.user_two['id'], user_ids)
- self.assertIn(self.user_badguy['id'], user_ids)
-
- def test_list_user_ids_for_project_no_duplicates(self):
- # Create user
- user_ref = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user_ref = self.identity_api.create_user(user_ref)
- # Create project
- project_ref = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(
- project_ref['id'], project_ref)
- # Create 2 roles and give user each role in project
- for i in range(2):
- role_ref = unit.new_role_ref()
- self.role_api.create_role(role_ref['id'], role_ref)
- self.assignment_api.add_role_to_user_and_project(
- user_id=user_ref['id'],
- tenant_id=project_ref['id'],
- role_id=role_ref['id'])
- # Get the list of user_ids in project
- user_ids = self.assignment_api.list_user_ids_for_project(
- project_ref['id'])
- # Ensure the user is only returned once
- self.assertEqual(1, len(user_ids))
-
- def test_get_project_user_ids_returns_not_found(self):
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.list_user_ids_for_project,
- uuid.uuid4().hex)
-
    def test_list_role_assignments_unfiltered(self):
        """Test unfiltered listing of role assignments."""
        test_plan = {
            # Create a domain, with a user, group & project
            'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1},
                         'roles': 3},
            # Create a grant of each type (user/group on project/domain)
            'assignments': [{'user': 0, 'role': 0, 'domain': 0},
                            {'user': 0, 'role': 1, 'project': 0},
                            {'group': 0, 'role': 2, 'domain': 0},
                            {'group': 0, 'role': 2, 'project': 0}],
            'tests': [
                # Check that we get back the 4 assignments
                {'params': {},
                 'results': [{'user': 0, 'role': 0, 'domain': 0},
                             {'user': 0, 'role': 1, 'project': 0},
                             {'group': 0, 'role': 2, 'domain': 0},
                             {'group': 0, 'role': 2, 'project': 0}]}
            ]
        }
        self.execute_assignment_plan(test_plan)
-
    def test_list_role_assignments_filtered_by_role(self):
        """Test listing of role assignments filtered by role ID."""
        test_plan = {
            # Create a user, group & project in the default domain
            'entities': {'domains': {'id': CONF.identity.default_domain_id,
                                     'users': 1, 'groups': 1, 'projects': 1},
                         'roles': 3},
            # Create a grant of each type (user/group on project/domain)
            'assignments': [{'user': 0, 'role': 0, 'domain': 0},
                            {'user': 0, 'role': 1, 'project': 0},
                            {'group': 0, 'role': 2, 'domain': 0},
                            {'group': 0, 'role': 2, 'project': 0}],
            'tests': [
                # Check that when filtering by role, we only get back those
                # that match
                {'params': {'role': 2},
                 'results': [{'group': 0, 'role': 2, 'domain': 0},
                             {'group': 0, 'role': 2, 'project': 0}]}
            ]
        }
        self.execute_assignment_plan(test_plan)
-
    def test_list_group_role_assignment(self):
        """A group role assignment appears in the assignment list."""
        # When a group role assignment is created and the role assignments are
        # listed then the group role assignment is included in the list.

        test_plan = {
            'entities': {'domains': {'id': CONF.identity.default_domain_id,
                                     'groups': 1, 'projects': 1},
                         'roles': 1},
            'assignments': [{'group': 0, 'role': 0, 'project': 0}],
            'tests': [
                {'params': {},
                 'results': [{'group': 0, 'role': 0, 'project': 0}]}
            ]
        }
        self.execute_assignment_plan(test_plan)
-
- def test_list_role_assignments_bad_role(self):
- assignment_list = self.assignment_api.list_role_assignments(
- role_id=uuid.uuid4().hex)
- self.assertEqual([], assignment_list)
-
    def test_add_duplicate_role_grant(self):
        """Granting the same role twice raises Conflict."""
        roles_ref = self.assignment_api.get_roles_for_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'])
        self.assertNotIn(self.role_admin['id'], roles_ref)
        self.assignment_api.add_role_to_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
        # A second identical grant must be rejected.
        self.assertRaises(exception.Conflict,
                          self.assignment_api.add_role_to_user_and_project,
                          self.user_foo['id'],
                          self.tenant_bar['id'],
                          self.role_admin['id'])
-
    def test_get_role_by_user_and_project_with_user_in_group(self):
        """Test for get role by user and project, user was added into a group.

        Test Plan:

        - Create a user, a project & a group, add this user to group
        - Create roles and grant them to user and project
        - Check the role list get by the user and project was as expected

        NOTE(review): both roles are granted directly to the user below; the
        group membership itself is never granted anything, so it only checks
        that membership does not disturb the direct-role result - confirm
        that is the intent.

        """
        user_ref = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
        user_ref = self.identity_api.create_user(user_ref)

        project_ref = unit.new_project_ref(
            domain_id=CONF.identity.default_domain_id)
        self.resource_api.create_project(project_ref['id'], project_ref)

        group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
        group_id = self.identity_api.create_group(group)['id']
        self.identity_api.add_user_to_group(user_ref['id'], group_id)

        role_ref_list = []
        for i in range(2):
            role_ref = unit.new_role_ref()
            self.role_api.create_role(role_ref['id'], role_ref)
            role_ref_list.append(role_ref)

            self.assignment_api.add_role_to_user_and_project(
                user_id=user_ref['id'],
                tenant_id=project_ref['id'],
                role_id=role_ref['id'])

        role_list = self.assignment_api.get_roles_for_user_and_project(
            user_ref['id'],
            project_ref['id'])

        self.assertEqual(set([r['id'] for r in role_ref_list]),
                         set(role_list))
-
    def test_get_role_by_user_and_project(self):
        """Roles granted on a project accumulate in the role list."""
        roles_ref = self.assignment_api.get_roles_for_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'])
        self.assertNotIn(self.role_admin['id'], roles_ref)
        self.assignment_api.add_role_to_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
        roles_ref = self.assignment_api.get_roles_for_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'])
        self.assertIn(self.role_admin['id'], roles_ref)
        self.assertNotIn('member', roles_ref)

        # Adding a second role keeps the first.
        self.assignment_api.add_role_to_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'], 'member')
        roles_ref = self.assignment_api.get_roles_for_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'])
        self.assertIn(self.role_admin['id'], roles_ref)
        self.assertIn('member', roles_ref)
-
    def test_get_roles_for_user_and_domain(self):
        """Test for getting roles for user on a domain.

        Test Plan:

        - Create a domain, with 2 users
        - Check no roles yet exist
        - Give user1 two roles on the domain, user2 one role
        - Get roles on user1 and the domain - make sure we only
          get back the 2 roles on user1
        - Delete both roles from user1
        - Check we get no roles back for user1 on domain

        """
        new_domain = unit.new_domain_ref()
        self.resource_api.create_domain(new_domain['id'], new_domain)
        new_user1 = unit.new_user_ref(domain_id=new_domain['id'])
        new_user1 = self.identity_api.create_user(new_user1)
        new_user2 = unit.new_user_ref(domain_id=new_domain['id'])
        new_user2 = self.identity_api.create_user(new_user2)
        roles_ref = self.assignment_api.list_grants(
            user_id=new_user1['id'],
            domain_id=new_domain['id'])
        self.assertEqual(0, len(roles_ref))
        # Now create the grants (roles are defined in default_fixtures)
        self.assignment_api.create_grant(user_id=new_user1['id'],
                                         domain_id=new_domain['id'],
                                         role_id='member')
        self.assignment_api.create_grant(user_id=new_user1['id'],
                                         domain_id=new_domain['id'],
                                         role_id='other')
        self.assignment_api.create_grant(user_id=new_user2['id'],
                                         domain_id=new_domain['id'],
                                         role_id='admin')
        # Read back the roles for user1 on domain
        roles_ids = self.assignment_api.get_roles_for_user_and_domain(
            new_user1['id'], new_domain['id'])
        self.assertEqual(2, len(roles_ids))
        self.assertIn(self.role_member['id'], roles_ids)
        self.assertIn(self.role_other['id'], roles_ids)

        # Now delete both grants for user1
        self.assignment_api.delete_grant(user_id=new_user1['id'],
                                         domain_id=new_domain['id'],
                                         role_id='member')
        self.assignment_api.delete_grant(user_id=new_user1['id'],
                                         domain_id=new_domain['id'],
                                         role_id='other')
        roles_ref = self.assignment_api.list_grants(
            user_id=new_user1['id'],
            domain_id=new_domain['id'])
        self.assertEqual(0, len(roles_ref))
-
    def test_get_roles_for_user_and_domain_returns_not_found(self):
        """Test errors raised when getting roles for user on a domain.

        Test Plan:

        - Check non-existing user gives UserNotFound
        - Check non-existing domain gives DomainNotFound

        """
        new_domain = self._get_domain_fixture()
        new_user1 = unit.new_user_ref(domain_id=new_domain['id'])
        new_user1 = self.identity_api.create_user(new_user1)

        self.assertRaises(exception.UserNotFound,
                          self.assignment_api.get_roles_for_user_and_domain,
                          uuid.uuid4().hex,
                          new_domain['id'])

        self.assertRaises(exception.DomainNotFound,
                          self.assignment_api.get_roles_for_user_and_domain,
                          new_user1['id'],
                          uuid.uuid4().hex)
-
- def test_get_roles_for_user_and_project_returns_not_found(self):
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.get_roles_for_user_and_project,
- uuid.uuid4().hex,
- self.tenant_bar['id'])
-
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.get_roles_for_user_and_project,
- self.user_foo['id'],
- uuid.uuid4().hex)
-
    def test_add_role_to_user_and_project_returns_not_found(self):
        """Granting on an unknown project or role raises NotFound."""
        self.assertRaises(exception.ProjectNotFound,
                          self.assignment_api.add_role_to_user_and_project,
                          self.user_foo['id'],
                          uuid.uuid4().hex,
                          self.role_admin['id'])

        self.assertRaises(exception.RoleNotFound,
                          self.assignment_api.add_role_to_user_and_project,
                          self.user_foo['id'],
                          self.tenant_bar['id'],
                          uuid.uuid4().hex)
-
- def test_add_role_to_user_and_project_no_user(self):
- # If add_role_to_user_and_project and the user doesn't exist, then
- # no error.
- user_id_not_exist = uuid.uuid4().hex
- self.assignment_api.add_role_to_user_and_project(
- user_id_not_exist, self.tenant_bar['id'], self.role_admin['id'])
-
    def test_remove_role_from_user_and_project(self):
        """A removed role disappears and a second removal raises NotFound."""
        self.assignment_api.add_role_to_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'], 'member')
        self.assignment_api.remove_role_from_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'], 'member')
        roles_ref = self.assignment_api.get_roles_for_user_and_project(
            self.user_foo['id'], self.tenant_bar['id'])
        self.assertNotIn('member', roles_ref)
        # Removing it again must fail.
        self.assertRaises(exception.NotFound,
                          self.assignment_api.
                          remove_role_from_user_and_project,
                          self.user_foo['id'],
                          self.tenant_bar['id'],
                          'member')
-
- def test_get_role_grant_by_user_and_project(self):
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'])
- self.assertEqual(1, len(roles_ref))
- self.assignment_api.create_grant(user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_admin['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'])
- self.assertIn(self.role_admin['id'],
- [role_ref['id'] for role_ref in roles_ref])
-
- self.assignment_api.create_grant(user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'])
-
- roles_ref_ids = []
- for ref in roles_ref:
- roles_ref_ids.append(ref['id'])
- self.assertIn(self.role_admin['id'], roles_ref_ids)
- self.assertIn('member', roles_ref_ids)
-
    def test_remove_role_grant_from_user_and_project(self):
        """A deleted grant disappears and re-deletion raises NotFound."""
        self.assignment_api.create_grant(user_id=self.user_foo['id'],
                                         project_id=self.tenant_baz['id'],
                                         role_id='member')
        roles_ref = self.assignment_api.list_grants(
            user_id=self.user_foo['id'],
            project_id=self.tenant_baz['id'])
        self.assertDictEqual(self.role_member, roles_ref[0])

        self.assignment_api.delete_grant(user_id=self.user_foo['id'],
                                         project_id=self.tenant_baz['id'],
                                         role_id='member')
        roles_ref = self.assignment_api.list_grants(
            user_id=self.user_foo['id'],
            project_id=self.tenant_baz['id'])
        self.assertEqual(0, len(roles_ref))
        # Deleting the same grant again must fail.
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          user_id=self.user_foo['id'],
                          project_id=self.tenant_baz['id'],
                          role_id='member')
-
    def test_get_role_assignment_by_project_not_found(self):
        """Checking a nonexistent project grant raises NotFound."""
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.check_grant_role_id,
                          user_id=self.user_foo['id'],
                          project_id=self.tenant_baz['id'],
                          role_id='member')

        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.check_grant_role_id,
                          group_id=uuid.uuid4().hex,
                          project_id=self.tenant_baz['id'],
                          role_id='member')
-
    def test_get_role_assignment_by_domain_not_found(self):
        """Checking a nonexistent domain grant raises NotFound."""
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.check_grant_role_id,
                          user_id=self.user_foo['id'],
                          domain_id=self.domain_default['id'],
                          role_id='member')

        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.check_grant_role_id,
                          group_id=uuid.uuid4().hex,
                          domain_id=self.domain_default['id'],
                          role_id='member')
-
    def test_del_role_assignment_by_project_not_found(self):
        """Deleting a nonexistent project grant raises NotFound."""
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          user_id=self.user_foo['id'],
                          project_id=self.tenant_baz['id'],
                          role_id='member')

        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          group_id=uuid.uuid4().hex,
                          project_id=self.tenant_baz['id'],
                          role_id='member')
-
    def test_del_role_assignment_by_domain_not_found(self):
        """Deleting a nonexistent domain grant raises NotFound."""
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          user_id=self.user_foo['id'],
                          domain_id=self.domain_default['id'],
                          role_id='member')

        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          group_id=uuid.uuid4().hex,
                          domain_id=self.domain_default['id'],
                          role_id='member')
-
    def test_get_and_remove_role_grant_by_group_and_project(self):
        """Group grants on a project can be created, listed and deleted."""
        new_domain = unit.new_domain_ref()
        self.resource_api.create_domain(new_domain['id'], new_domain)
        new_group = unit.new_group_ref(domain_id=new_domain['id'])
        new_group = self.identity_api.create_group(new_group)
        new_user = unit.new_user_ref(domain_id=new_domain['id'])
        new_user = self.identity_api.create_user(new_user)
        self.identity_api.add_user_to_group(new_user['id'],
                                            new_group['id'])
        # No grants exist yet for the group on this project.
        roles_ref = self.assignment_api.list_grants(
            group_id=new_group['id'],
            project_id=self.tenant_bar['id'])
        self.assertEqual(0, len(roles_ref))
        self.assignment_api.create_grant(group_id=new_group['id'],
                                         project_id=self.tenant_bar['id'],
                                         role_id='member')
        roles_ref = self.assignment_api.list_grants(
            group_id=new_group['id'],
            project_id=self.tenant_bar['id'])
        self.assertDictEqual(self.role_member, roles_ref[0])

        self.assignment_api.delete_grant(group_id=new_group['id'],
                                         project_id=self.tenant_bar['id'],
                                         role_id='member')
        roles_ref = self.assignment_api.list_grants(
            group_id=new_group['id'],
            project_id=self.tenant_bar['id'])
        self.assertEqual(0, len(roles_ref))
        # Deleting the grant a second time must fail.
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          group_id=new_group['id'],
                          project_id=self.tenant_bar['id'],
                          role_id='member')
-
    def test_get_and_remove_role_grant_by_group_and_domain(self):
        """Group grants on a domain can be created, listed and deleted."""
        new_domain = unit.new_domain_ref()
        self.resource_api.create_domain(new_domain['id'], new_domain)
        new_group = unit.new_group_ref(domain_id=new_domain['id'])
        new_group = self.identity_api.create_group(new_group)
        new_user = unit.new_user_ref(domain_id=new_domain['id'])
        new_user = self.identity_api.create_user(new_user)
        self.identity_api.add_user_to_group(new_user['id'],
                                            new_group['id'])

        # No grants exist yet for the group on this domain.
        roles_ref = self.assignment_api.list_grants(
            group_id=new_group['id'],
            domain_id=new_domain['id'])
        self.assertEqual(0, len(roles_ref))

        self.assignment_api.create_grant(group_id=new_group['id'],
                                         domain_id=new_domain['id'],
                                         role_id='member')

        roles_ref = self.assignment_api.list_grants(
            group_id=new_group['id'],
            domain_id=new_domain['id'])
        self.assertDictEqual(self.role_member, roles_ref[0])

        self.assignment_api.delete_grant(group_id=new_group['id'],
                                         domain_id=new_domain['id'],
                                         role_id='member')
        roles_ref = self.assignment_api.list_grants(
            group_id=new_group['id'],
            domain_id=new_domain['id'])
        self.assertEqual(0, len(roles_ref))
        # Deleting the grant a second time must fail.
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          group_id=new_group['id'],
                          domain_id=new_domain['id'],
                          role_id='member')
-
    def test_get_and_remove_correct_role_grant_from_a_mix(self):
        """list/delete of one grant is unaffected by other nearby grants."""
        new_domain = unit.new_domain_ref()
        self.resource_api.create_domain(new_domain['id'], new_domain)
        new_project = unit.new_project_ref(domain_id=new_domain['id'])
        self.resource_api.create_project(new_project['id'], new_project)
        new_group = unit.new_group_ref(domain_id=new_domain['id'])
        new_group = self.identity_api.create_group(new_group)
        new_group2 = unit.new_group_ref(domain_id=new_domain['id'])
        new_group2 = self.identity_api.create_group(new_group2)
        new_user = unit.new_user_ref(domain_id=new_domain['id'])
        new_user = self.identity_api.create_user(new_user)
        new_user2 = unit.new_user_ref(domain_id=new_domain['id'])
        new_user2 = self.identity_api.create_user(new_user2)
        self.identity_api.add_user_to_group(new_user['id'],
                                            new_group['id'])
        # First check we have no grants
        roles_ref = self.assignment_api.list_grants(
            group_id=new_group['id'],
            domain_id=new_domain['id'])
        self.assertEqual(0, len(roles_ref))
        # Now add the grant we are going to test for, and some others as
        # well just to make sure we get back the right one
        self.assignment_api.create_grant(group_id=new_group['id'],
                                         domain_id=new_domain['id'],
                                         role_id='member')

        self.assignment_api.create_grant(group_id=new_group2['id'],
                                         domain_id=new_domain['id'],
                                         role_id=self.role_admin['id'])
        self.assignment_api.create_grant(user_id=new_user2['id'],
                                         domain_id=new_domain['id'],
                                         role_id=self.role_admin['id'])
        self.assignment_api.create_grant(group_id=new_group['id'],
                                         project_id=new_project['id'],
                                         role_id=self.role_admin['id'])

        # Only the group/domain grant for new_group should come back.
        roles_ref = self.assignment_api.list_grants(
            group_id=new_group['id'],
            domain_id=new_domain['id'])
        self.assertDictEqual(self.role_member, roles_ref[0])

        self.assignment_api.delete_grant(group_id=new_group['id'],
                                         domain_id=new_domain['id'],
                                         role_id='member')
        roles_ref = self.assignment_api.list_grants(
            group_id=new_group['id'],
            domain_id=new_domain['id'])
        self.assertEqual(0, len(roles_ref))
        # Deleting the grant a second time must fail.
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          group_id=new_group['id'],
                          domain_id=new_domain['id'],
                          role_id='member')
-
    def test_get_and_remove_role_grant_by_user_and_domain(self):
        """User grants on a domain can be created, listed and deleted."""
        new_domain = unit.new_domain_ref()
        self.resource_api.create_domain(new_domain['id'], new_domain)
        new_user = unit.new_user_ref(domain_id=new_domain['id'])
        new_user = self.identity_api.create_user(new_user)
        roles_ref = self.assignment_api.list_grants(
            user_id=new_user['id'],
            domain_id=new_domain['id'])
        self.assertEqual(0, len(roles_ref))
        self.assignment_api.create_grant(user_id=new_user['id'],
                                         domain_id=new_domain['id'],
                                         role_id='member')
        roles_ref = self.assignment_api.list_grants(
            user_id=new_user['id'],
            domain_id=new_domain['id'])
        self.assertDictEqual(self.role_member, roles_ref[0])

        self.assignment_api.delete_grant(user_id=new_user['id'],
                                         domain_id=new_domain['id'],
                                         role_id='member')
        roles_ref = self.assignment_api.list_grants(
            user_id=new_user['id'],
            domain_id=new_domain['id'])
        self.assertEqual(0, len(roles_ref))
        # Deleting the grant a second time must fail.
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          user_id=new_user['id'],
                          domain_id=new_domain['id'],
                          role_id='member')
-
    def test_get_and_remove_role_grant_by_group_and_cross_domain(self):
        """A group's grants on two domains are independent of each other."""
        group1_domain1_role = unit.new_role_ref()
        self.role_api.create_role(group1_domain1_role['id'],
                                  group1_domain1_role)
        group1_domain2_role = unit.new_role_ref()
        self.role_api.create_role(group1_domain2_role['id'],
                                  group1_domain2_role)
        domain1 = unit.new_domain_ref()
        self.resource_api.create_domain(domain1['id'], domain1)
        domain2 = unit.new_domain_ref()
        self.resource_api.create_domain(domain2['id'], domain2)
        group1 = unit.new_group_ref(domain_id=domain1['id'])
        group1 = self.identity_api.create_group(group1)
        # No grants exist on either domain yet.
        roles_ref = self.assignment_api.list_grants(
            group_id=group1['id'],
            domain_id=domain1['id'])
        self.assertEqual(0, len(roles_ref))
        roles_ref = self.assignment_api.list_grants(
            group_id=group1['id'],
            domain_id=domain2['id'])
        self.assertEqual(0, len(roles_ref))
        self.assignment_api.create_grant(group_id=group1['id'],
                                         domain_id=domain1['id'],
                                         role_id=group1_domain1_role['id'])
        self.assignment_api.create_grant(group_id=group1['id'],
                                         domain_id=domain2['id'],
                                         role_id=group1_domain2_role['id'])
        # Each domain reports only its own grant.
        roles_ref = self.assignment_api.list_grants(
            group_id=group1['id'],
            domain_id=domain1['id'])
        self.assertDictEqual(group1_domain1_role, roles_ref[0])
        roles_ref = self.assignment_api.list_grants(
            group_id=group1['id'],
            domain_id=domain2['id'])
        self.assertDictEqual(group1_domain2_role, roles_ref[0])

        self.assignment_api.delete_grant(group_id=group1['id'],
                                         domain_id=domain2['id'],
                                         role_id=group1_domain2_role['id'])
        roles_ref = self.assignment_api.list_grants(
            group_id=group1['id'],
            domain_id=domain2['id'])
        self.assertEqual(0, len(roles_ref))
        # Deleting the grant a second time must fail.
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          group_id=group1['id'],
                          domain_id=domain2['id'],
                          role_id=group1_domain2_role['id'])
-
    def test_get_and_remove_role_grant_by_user_and_cross_domain(self):
        """A user's grants on two domains are independent of each other."""
        user1_domain1_role = unit.new_role_ref()
        self.role_api.create_role(user1_domain1_role['id'], user1_domain1_role)
        user1_domain2_role = unit.new_role_ref()
        self.role_api.create_role(user1_domain2_role['id'], user1_domain2_role)
        domain1 = unit.new_domain_ref()
        self.resource_api.create_domain(domain1['id'], domain1)
        domain2 = unit.new_domain_ref()
        self.resource_api.create_domain(domain2['id'], domain2)
        user1 = unit.new_user_ref(domain_id=domain1['id'])
        user1 = self.identity_api.create_user(user1)
        # No grants exist on either domain yet.
        roles_ref = self.assignment_api.list_grants(
            user_id=user1['id'],
            domain_id=domain1['id'])
        self.assertEqual(0, len(roles_ref))
        roles_ref = self.assignment_api.list_grants(
            user_id=user1['id'],
            domain_id=domain2['id'])
        self.assertEqual(0, len(roles_ref))
        self.assignment_api.create_grant(user_id=user1['id'],
                                         domain_id=domain1['id'],
                                         role_id=user1_domain1_role['id'])
        self.assignment_api.create_grant(user_id=user1['id'],
                                         domain_id=domain2['id'],
                                         role_id=user1_domain2_role['id'])
        # Each domain reports only its own grant.
        roles_ref = self.assignment_api.list_grants(
            user_id=user1['id'],
            domain_id=domain1['id'])
        self.assertDictEqual(user1_domain1_role, roles_ref[0])
        roles_ref = self.assignment_api.list_grants(
            user_id=user1['id'],
            domain_id=domain2['id'])
        self.assertDictEqual(user1_domain2_role, roles_ref[0])

        self.assignment_api.delete_grant(user_id=user1['id'],
                                         domain_id=domain2['id'],
                                         role_id=user1_domain2_role['id'])
        roles_ref = self.assignment_api.list_grants(
            user_id=user1['id'],
            domain_id=domain2['id'])
        self.assertEqual(0, len(roles_ref))
        # Deleting the grant a second time must fail.
        self.assertRaises(exception.RoleAssignmentNotFound,
                          self.assignment_api.delete_grant,
                          user_id=user1['id'],
                          domain_id=domain2['id'],
                          role_id=user1_domain2_role['id'])
-
- def test_role_grant_by_group_and_cross_domain_project(self):
- role1 = unit.new_role_ref()
- self.role_api.create_role(role1['id'], role1)
- role2 = unit.new_role_ref()
- self.role_api.create_role(role2['id'], role2)
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- group1 = unit.new_group_ref(domain_id=domain1['id'])
- group1 = self.identity_api.create_group(group1)
- project1 = unit.new_project_ref(domain_id=domain2['id'])
- self.resource_api.create_project(project1['id'], project1)
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role2['id'])
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
-
- roles_ref_ids = []
- for ref in roles_ref:
- roles_ref_ids.append(ref['id'])
- self.assertIn(role1['id'], roles_ref_ids)
- self.assertIn(role2['id'], roles_ref_ids)
-
- self.assignment_api.delete_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- self.assertDictEqual(role2, roles_ref[0])
-
- def test_role_grant_by_user_and_cross_domain_project(self):
- role1 = unit.new_role_ref()
- self.role_api.create_role(role1['id'], role1)
- role2 = unit.new_role_ref()
- self.role_api.create_role(role2['id'], role2)
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- user1 = unit.new_user_ref(domain_id=domain1['id'])
- user1 = self.identity_api.create_user(user1)
- project1 = unit.new_project_ref(domain_id=domain2['id'])
- self.resource_api.create_project(project1['id'], project1)
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role2['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
-
- roles_ref_ids = []
- for ref in roles_ref:
- roles_ref_ids.append(ref['id'])
- self.assertIn(role1['id'], roles_ref_ids)
- self.assertIn(role2['id'], roles_ref_ids)
-
- self.assignment_api.delete_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- self.assertDictEqual(role2, roles_ref[0])
-
- def test_delete_user_grant_no_user(self):
- # Can delete a grant where the user doesn't exist.
- role = unit.new_role_ref()
- role_id = role['id']
- self.role_api.create_role(role_id, role)
-
- user_id = uuid.uuid4().hex
-
- self.assignment_api.create_grant(role_id, user_id=user_id,
- project_id=self.tenant_bar['id'])
-
- self.assignment_api.delete_grant(role_id, user_id=user_id,
- project_id=self.tenant_bar['id'])
-
- def test_delete_group_grant_no_group(self):
- # Can delete a grant where the group doesn't exist.
- role = unit.new_role_ref()
- role_id = role['id']
- self.role_api.create_role(role_id, role)
-
- group_id = uuid.uuid4().hex
-
- self.assignment_api.create_grant(role_id, group_id=group_id,
- project_id=self.tenant_bar['id'])
-
- self.assignment_api.delete_grant(role_id, group_id=group_id,
- project_id=self.tenant_bar['id'])
-
- def test_grant_crud_throws_exception_if_invalid_role(self):
- """Ensure RoleNotFound thrown if role does not exist."""
- def assert_role_not_found_exception(f, **kwargs):
- self.assertRaises(exception.RoleNotFound, f,
- role_id=uuid.uuid4().hex, **kwargs)
-
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user_resp = self.identity_api.create_user(user)
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group_resp = self.identity_api.create_group(group)
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project_resp = self.resource_api.create_project(project['id'], project)
-
- for manager_call in [self.assignment_api.create_grant,
- self.assignment_api.get_grant,
- self.assignment_api.delete_grant]:
- assert_role_not_found_exception(
- manager_call,
- user_id=user_resp['id'], project_id=project_resp['id'])
- assert_role_not_found_exception(
- manager_call,
- group_id=group_resp['id'], project_id=project_resp['id'])
- assert_role_not_found_exception(
- manager_call,
- user_id=user_resp['id'],
- domain_id=CONF.identity.default_domain_id)
- assert_role_not_found_exception(
- manager_call,
- group_id=group_resp['id'],
- domain_id=CONF.identity.default_domain_id)
-
- def test_multi_role_grant_by_user_group_on_project_domain(self):
- role_list = []
- for _ in range(10):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- user1 = unit.new_user_ref(domain_id=domain1['id'])
- user1 = self.identity_api.create_user(user1)
- group1 = unit.new_group_ref(domain_id=domain1['id'])
- group1 = self.identity_api.create_group(group1)
- group2 = unit.new_group_ref(domain_id=domain1['id'])
- group2 = self.identity_api.create_group(group2)
- project1 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project1['id'], project1)
-
- self.identity_api.add_user_to_group(user1['id'],
- group1['id'])
- self.identity_api.add_user_to_group(user1['id'],
- group2['id'])
-
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role_list[1]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role_list[2]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role_list[3]['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role_list[4]['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role_list[5]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role_list[6]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role_list[7]['id'])
- roles_ref = self.assignment_api.list_grants(user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertEqual(2, len(roles_ref))
- self.assertIn(role_list[0], roles_ref)
- self.assertIn(role_list[1], roles_ref)
- roles_ref = self.assignment_api.list_grants(group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertEqual(2, len(roles_ref))
- self.assertIn(role_list[2], roles_ref)
- self.assertIn(role_list[3], roles_ref)
- roles_ref = self.assignment_api.list_grants(user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(2, len(roles_ref))
- self.assertIn(role_list[4], roles_ref)
- self.assertIn(role_list[5], roles_ref)
- roles_ref = self.assignment_api.list_grants(group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(2, len(roles_ref))
- self.assertIn(role_list[6], roles_ref)
- self.assertIn(role_list[7], roles_ref)
-
- # Now test the alternate way of getting back lists of grants,
- # where user and group roles are combined. These should match
- # the above results.
- combined_list = self.assignment_api.get_roles_for_user_and_project(
- user1['id'], project1['id'])
- self.assertEqual(4, len(combined_list))
- self.assertIn(role_list[4]['id'], combined_list)
- self.assertIn(role_list[5]['id'], combined_list)
- self.assertIn(role_list[6]['id'], combined_list)
- self.assertIn(role_list[7]['id'], combined_list)
-
- combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
- user1['id'], domain1['id'])
- self.assertEqual(4, len(combined_role_list))
- self.assertIn(role_list[0]['id'], combined_role_list)
- self.assertIn(role_list[1]['id'], combined_role_list)
- self.assertIn(role_list[2]['id'], combined_role_list)
- self.assertIn(role_list[3]['id'], combined_role_list)
-
- def test_multi_group_grants_on_project_domain(self):
- """Test multiple group roles for user on project and domain.
-
- Test Plan:
-
- - Create 6 roles
- - Create a domain, with a project, user and two groups
- - Make the user a member of both groups
- - Check no roles yet exist
- - Assign a role to each user and both groups on both the
- project and domain
- - Get a list of effective roles for the user on both the
- project and domain, checking we get back the correct three
- roles
-
- """
- role_list = []
- for _ in range(6):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- user1 = unit.new_user_ref(domain_id=domain1['id'])
- user1 = self.identity_api.create_user(user1)
- group1 = unit.new_group_ref(domain_id=domain1['id'])
- group1 = self.identity_api.create_group(group1)
- group2 = unit.new_group_ref(domain_id=domain1['id'])
- group2 = self.identity_api.create_group(group2)
- project1 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project1['id'], project1)
-
- self.identity_api.add_user_to_group(user1['id'],
- group1['id'])
- self.identity_api.add_user_to_group(user1['id'],
- group2['id'])
-
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role_list[1]['id'])
- self.assignment_api.create_grant(group_id=group2['id'],
- domain_id=domain1['id'],
- role_id=role_list[2]['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role_list[3]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role_list[4]['id'])
- self.assignment_api.create_grant(group_id=group2['id'],
- project_id=project1['id'],
- role_id=role_list[5]['id'])
-
- # Read by the roles, ensuring we get the correct 3 roles for
- # both project and domain
- combined_list = self.assignment_api.get_roles_for_user_and_project(
- user1['id'], project1['id'])
- self.assertEqual(3, len(combined_list))
- self.assertIn(role_list[3]['id'], combined_list)
- self.assertIn(role_list[4]['id'], combined_list)
- self.assertIn(role_list[5]['id'], combined_list)
-
- combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
- user1['id'], domain1['id'])
- self.assertEqual(3, len(combined_role_list))
- self.assertIn(role_list[0]['id'], combined_role_list)
- self.assertIn(role_list[1]['id'], combined_role_list)
- self.assertIn(role_list[2]['id'], combined_role_list)
-
- def test_delete_role_with_user_and_group_grants(self):
- role1 = unit.new_role_ref()
- self.role_api.create_role(role1['id'], role1)
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- project1 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project1['id'], project1)
- user1 = unit.new_user_ref(domain_id=domain1['id'])
- user1 = self.identity_api.create_user(user1)
- group1 = unit.new_group_ref(domain_id=domain1['id'])
- group1 = self.identity_api.create_group(group1)
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role1['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertEqual(1, len(roles_ref))
- self.role_api.delete_role(role1['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertEqual(0, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertEqual(0, len(roles_ref))
-
- def test_list_role_assignment_by_domain(self):
- """Test listing of role assignment filtered by domain."""
- test_plan = {
- # A domain with 3 users, 1 group, a spoiler domain and 2 roles.
- 'entities': {'domains': [{'users': 3, 'groups': 1}, 1],
- 'roles': 2},
- # Users 1 & 2 are in the group
- 'group_memberships': [{'group': 0, 'users': [1, 2]}],
- # Assign a role for user 0 and the group
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'group': 0, 'role': 1, 'domain': 0}],
- 'tests': [
- # List all effective assignments for domain[0].
- # Should get one direct user role and user roles for each of
- # the users in the group.
- {'params': {'domain': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 1, 'role': 1, 'domain': 0,
- 'indirect': {'group': 0}},
- {'user': 2, 'role': 1, 'domain': 0,
- 'indirect': {'group': 0}}
- ]},
- # Using domain[1] should return nothing
- {'params': {'domain': 1, 'effective': True},
- 'results': []},
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_list_role_assignment_by_user_with_domain_group_roles(self):
- """Test listing assignments by user, with group roles on a domain."""
- test_plan = {
- # A domain with 3 users, 3 groups, a spoiler domain
- # plus 3 roles.
- 'entities': {'domains': [{'users': 3, 'groups': 3}, 1],
- 'roles': 3},
- # Users 0 & 1 are in the group 0, User 0 also in group 1
- 'group_memberships': [{'group': 0, 'users': [0, 1]},
- {'group': 1, 'users': [0]}],
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'group': 0, 'role': 1, 'domain': 0},
- {'group': 1, 'role': 2, 'domain': 0},
- # ...and two spoiler assignments
- {'user': 1, 'role': 1, 'domain': 0},
- {'group': 2, 'role': 2, 'domain': 0}],
- 'tests': [
- # List all effective assignments for user[0].
- # Should get one direct user role and a user role for each of
- # groups 0 and 1
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'domain': 0,
- 'indirect': {'group': 0}},
- {'user': 0, 'role': 2, 'domain': 0,
- 'indirect': {'group': 1}}
- ]},
- # Adding domain[0] as a filter should return the same data
- {'params': {'user': 0, 'domain': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'domain': 0,
- 'indirect': {'group': 0}},
- {'user': 0, 'role': 2, 'domain': 0,
- 'indirect': {'group': 1}}
- ]},
- # Using domain[1] should return nothing
- {'params': {'user': 0, 'domain': 1, 'effective': True},
- 'results': []},
- # Using user[2] should return nothing
- {'params': {'user': 2, 'domain': 0, 'effective': True},
- 'results': []},
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_list_role_assignment_using_sourced_groups(self):
- """Test listing assignments when restricted by source groups."""
- test_plan = {
- # The default domain with 3 users, 3 groups, 3 projects,
- # plus 3 roles.
- 'entities': {'domains': {'id': CONF.identity.default_domain_id,
- 'users': 3, 'groups': 3, 'projects': 3},
- 'roles': 3},
- # Users 0 & 1 are in the group 0, User 0 also in group 1
- 'group_memberships': [{'group': 0, 'users': [0, 1]},
- {'group': 1, 'users': [0]}],
- # Spread the assignments around - we want to be able to show that
- # if sourced by group, assignments from other sources are excluded
- 'assignments': [{'user': 0, 'role': 0, 'project': 0},
- {'group': 0, 'role': 1, 'project': 1},
- {'group': 1, 'role': 2, 'project': 0},
- {'group': 1, 'role': 2, 'project': 1},
- {'user': 2, 'role': 1, 'project': 1},
- {'group': 2, 'role': 2, 'project': 2}
- ],
- 'tests': [
- # List all effective assignments sourced from groups 0 and 1
- {'params': {'source_from_group_ids': [0, 1],
- 'effective': True},
- 'results': [{'group': 0, 'role': 1, 'project': 1},
- {'group': 1, 'role': 2, 'project': 0},
- {'group': 1, 'role': 2, 'project': 1}
- ]},
- # Adding a role as a filter should further restrict the entries
- {'params': {'source_from_group_ids': [0, 1], 'role': 2,
- 'effective': True},
- 'results': [{'group': 1, 'role': 2, 'project': 0},
- {'group': 1, 'role': 2, 'project': 1}
- ]},
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_list_role_assignment_using_sourced_groups_with_domains(self):
- """Test listing domain assignments when restricted by source groups."""
- test_plan = {
- # A domain with 3 users, 3 groups, 3 projects, a second domain,
- # plus 3 roles.
- 'entities': {'domains': [{'users': 3, 'groups': 3, 'projects': 3},
- 1],
- 'roles': 3},
- # Users 0 & 1 are in the group 0, User 0 also in group 1
- 'group_memberships': [{'group': 0, 'users': [0, 1]},
- {'group': 1, 'users': [0]}],
- # Spread the assignments around - we want to be able to show that
- # if sourced by group, assignments from other sources are excluded
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'group': 0, 'role': 1, 'domain': 1},
- {'group': 1, 'role': 2, 'project': 0},
- {'group': 1, 'role': 2, 'project': 1},
- {'user': 2, 'role': 1, 'project': 1},
- {'group': 2, 'role': 2, 'project': 2}
- ],
- 'tests': [
- # List all effective assignments sourced from groups 0 and 1
- {'params': {'source_from_group_ids': [0, 1],
- 'effective': True},
- 'results': [{'group': 0, 'role': 1, 'domain': 1},
- {'group': 1, 'role': 2, 'project': 0},
- {'group': 1, 'role': 2, 'project': 1}
- ]},
- # Adding a role as a filter should further restrict the entries
- {'params': {'source_from_group_ids': [0, 1], 'role': 1,
- 'effective': True},
- 'results': [{'group': 0, 'role': 1, 'domain': 1},
- ]},
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_list_role_assignment_fails_with_userid_and_source_groups(self):
- """Show we trap this unsupported internal combination of params."""
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
- self.assertRaises(exception.UnexpectedError,
- self.assignment_api.list_role_assignments,
- effective=True,
- user_id=self.user_foo['id'],
- source_from_group_ids=[group['id']])
-
- def test_add_user_to_project(self):
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- self.user_foo['id'])
- tenants = self.assignment_api.list_projects_for_user(
- self.user_foo['id'])
- self.assertIn(self.tenant_baz, tenants)
-
- def test_add_user_to_project_missing_default_role(self):
- self.role_api.delete_role(CONF.member_role_id)
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- CONF.member_role_id)
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- self.user_foo['id'])
- tenants = (
- self.assignment_api.list_projects_for_user(self.user_foo['id']))
- self.assertIn(self.tenant_baz, tenants)
- default_role = self.role_api.get_role(CONF.member_role_id)
- self.assertIsNotNone(default_role)
-
- def test_add_user_to_project_returns_not_found(self):
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.add_user_to_project,
- uuid.uuid4().hex,
- self.user_foo['id'])
-
- def test_add_user_to_project_no_user(self):
- # If add_user_to_project is called and the user doesn't exist, then
- # no error is raised.
- user_id_not_exist = uuid.uuid4().hex
- self.assignment_api.add_user_to_project(self.tenant_bar['id'],
- user_id_not_exist)
-
- def test_remove_user_from_project(self):
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- self.user_foo['id'])
- self.assignment_api.remove_user_from_project(self.tenant_baz['id'],
- self.user_foo['id'])
- tenants = self.assignment_api.list_projects_for_user(
- self.user_foo['id'])
- self.assertNotIn(self.tenant_baz, tenants)
-
- def test_remove_user_from_project_race_delete_role(self):
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- self.user_foo['id'])
- self.assignment_api.add_role_to_user_and_project(
- tenant_id=self.tenant_baz['id'],
- user_id=self.user_foo['id'],
- role_id=self.role_other['id'])
-
- # Mock a race condition, delete a role after
- # get_roles_for_user_and_project() is called in
- # remove_user_from_project().
- roles = self.assignment_api.get_roles_for_user_and_project(
- self.user_foo['id'], self.tenant_baz['id'])
- self.role_api.delete_role(self.role_other['id'])
- self.assignment_api.get_roles_for_user_and_project = mock.Mock(
- return_value=roles)
- self.assignment_api.remove_user_from_project(self.tenant_baz['id'],
- self.user_foo['id'])
- tenants = self.assignment_api.list_projects_for_user(
- self.user_foo['id'])
- self.assertNotIn(self.tenant_baz, tenants)
-
- def test_remove_user_from_project_returns_not_found(self):
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.remove_user_from_project,
- uuid.uuid4().hex,
- self.user_foo['id'])
-
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.remove_user_from_project,
- self.tenant_bar['id'],
- uuid.uuid4().hex)
-
- self.assertRaises(exception.NotFound,
- self.assignment_api.remove_user_from_project,
- self.tenant_baz['id'],
- self.user_foo['id'])
-
- def test_list_user_project_ids_returns_not_found(self):
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.list_projects_for_user,
- uuid.uuid4().hex)
-
- def test_delete_user_with_project_association(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.assignment_api.add_user_to_project(self.tenant_bar['id'],
- user['id'])
- self.identity_api.delete_user(user['id'])
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.list_projects_for_user,
- user['id'])
-
- def test_delete_user_with_project_roles(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.assignment_api.add_role_to_user_and_project(
- user['id'],
- self.tenant_bar['id'],
- self.role_member['id'])
- self.identity_api.delete_user(user['id'])
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.list_projects_for_user,
- user['id'])
-
- def test_delete_role_returns_not_found(self):
- self.assertRaises(exception.RoleNotFound,
- self.role_api.delete_role,
- uuid.uuid4().hex)
-
- def test_delete_project_with_role_assignments(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project['id'], project)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], project['id'], 'member')
- self.resource_api.delete_project(project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.list_user_ids_for_project,
- project['id'])
-
- def test_delete_role_check_role_grant(self):
- role = unit.new_role_ref()
- alt_role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- self.role_api.create_role(alt_role['id'], alt_role)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'], role['id'])
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'], alt_role['id'])
- self.role_api.delete_role(role['id'])
- roles_ref = self.assignment_api.get_roles_for_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'])
- self.assertNotIn(role['id'], roles_ref)
- self.assertIn(alt_role['id'], roles_ref)
-
- def test_list_projects_for_user(self):
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user1 = unit.new_user_ref(domain_id=domain['id'])
- user1 = self.identity_api.create_user(user1)
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertEqual(0, len(user_projects))
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_baz['id'],
- role_id=self.role_member['id'])
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertEqual(2, len(user_projects))
-
- def test_list_projects_for_user_with_grants(self):
- # Create two groups each with a role on a different project, and
- # make user1 a member of both groups. Both these new projects
- # should now be included, along with any direct user grants.
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user1 = unit.new_user_ref(domain_id=domain['id'])
- user1 = self.identity_api.create_user(user1)
- group1 = unit.new_group_ref(domain_id=domain['id'])
- group1 = self.identity_api.create_group(group1)
- group2 = unit.new_group_ref(domain_id=domain['id'])
- group2 = self.identity_api.create_group(group2)
- project1 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project2['id'], project2)
- self.identity_api.add_user_to_group(user1['id'], group1['id'])
- self.identity_api.add_user_to_group(user1['id'], group2['id'])
-
- # Create 3 grants, one user grant, the other two as group grants
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=self.role_admin['id'])
- self.assignment_api.create_grant(group_id=group2['id'],
- project_id=project2['id'],
- role_id=self.role_admin['id'])
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertEqual(3, len(user_projects))
-
- def test_create_grant_no_user(self):
- # If call create_grant with a user that doesn't exist, doesn't fail.
- self.assignment_api.create_grant(
- self.role_other['id'],
- user_id=uuid.uuid4().hex,
- project_id=self.tenant_bar['id'])
-
- def test_create_grant_no_group(self):
- # If call create_grant with a group that doesn't exist, doesn't fail.
- self.assignment_api.create_grant(
- self.role_other['id'],
- group_id=uuid.uuid4().hex,
- project_id=self.tenant_bar['id'])
-
- def test_delete_group_removes_role_assignments(self):
- # When a group is deleted any role assignments for the group are
- # removed.
-
- MEMBER_ROLE_ID = 'member'
-
- def get_member_assignments():
- assignments = self.assignment_api.list_role_assignments()
- return [x for x in assignments if x['role_id'] == MEMBER_ROLE_ID]
-
- orig_member_assignments = get_member_assignments()
-
- # Create a group.
- new_group = unit.new_group_ref(
- domain_id=CONF.identity.default_domain_id)
- new_group = self.identity_api.create_group(new_group)
-
- # Create a project.
- new_project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(new_project['id'], new_project)
-
- # Assign a role to the group.
- self.assignment_api.create_grant(
- group_id=new_group['id'], project_id=new_project['id'],
- role_id=MEMBER_ROLE_ID)
-
- # Delete the group.
- self.identity_api.delete_group(new_group['id'])
-
- # Check that the role assignment for the group is gone
- member_assignments = get_member_assignments()
-
- self.assertThat(member_assignments,
- matchers.Equals(orig_member_assignments))
-
- def test_get_roles_for_groups_on_domain(self):
- """Test retrieving group domain roles.
-
- Test Plan:
-
- - Create a domain, three groups and three roles
- - Assign one an inherited and the others a non-inherited group role
- to the domain
- - Ensure that only the non-inherited roles are returned on the domain
-
- """
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- group_list = []
- group_id_list = []
- role_list = []
- for _ in range(3):
- group = unit.new_group_ref(domain_id=domain1['id'])
- group = self.identity_api.create_group(group)
- group_list.append(group)
- group_id_list.append(group['id'])
-
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- # Assign the roles - one is inherited
- self.assignment_api.create_grant(group_id=group_list[0]['id'],
- domain_id=domain1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.create_grant(group_id=group_list[1]['id'],
- domain_id=domain1['id'],
- role_id=role_list[1]['id'])
- self.assignment_api.create_grant(group_id=group_list[2]['id'],
- domain_id=domain1['id'],
- role_id=role_list[2]['id'],
- inherited_to_projects=True)
-
- # Now get the effective roles for the groups on the domain project. We
- # shouldn't get back the inherited role.
-
- role_refs = self.assignment_api.get_roles_for_groups(
- group_id_list, domain_id=domain1['id'])
-
- self.assertThat(role_refs, matchers.HasLength(2))
- self.assertIn(role_list[0], role_refs)
- self.assertIn(role_list[1], role_refs)
-
- def test_get_roles_for_groups_on_project(self):
- """Test retrieving group project roles.
-
- Test Plan:
-
- - Create two domains, two projects, six groups and six roles
- - Project1 is in Domain1, Project2 is in Domain2
- - Domain2/Project2 are spoilers
- - Assign a different direct group role to each project as well
- as both an inherited and non-inherited role to each domain
- - Get the group roles for Project 1 - depending on whether we have
- enabled inheritance, we should either get back just the direct role
- or both the direct one plus the inherited domain role from Domain 1
-
- """
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- project1 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain2['id'])
- self.resource_api.create_project(project2['id'], project2)
- group_list = []
- group_id_list = []
- role_list = []
- for _ in range(6):
- group = unit.new_group_ref(domain_id=domain1['id'])
- group = self.identity_api.create_group(group)
- group_list.append(group)
- group_id_list.append(group['id'])
-
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- # Assign the roles - one inherited and one non-inherited on Domain1,
- # plus one on Project1
- self.assignment_api.create_grant(group_id=group_list[0]['id'],
- domain_id=domain1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.create_grant(group_id=group_list[1]['id'],
- domain_id=domain1['id'],
- role_id=role_list[1]['id'],
- inherited_to_projects=True)
- self.assignment_api.create_grant(group_id=group_list[2]['id'],
- project_id=project1['id'],
- role_id=role_list[2]['id'])
-
- # ...and a duplicate set of spoiler assignments to Domain2/Project2
- self.assignment_api.create_grant(group_id=group_list[3]['id'],
- domain_id=domain2['id'],
- role_id=role_list[3]['id'])
- self.assignment_api.create_grant(group_id=group_list[4]['id'],
- domain_id=domain2['id'],
- role_id=role_list[4]['id'],
- inherited_to_projects=True)
- self.assignment_api.create_grant(group_id=group_list[5]['id'],
- project_id=project2['id'],
- role_id=role_list[5]['id'])
-
- # Now get the effective roles for all groups on the Project1. With
- # inheritance off, we should only get back the direct role.
-
- self.config_fixture.config(group='os_inherit', enabled=False)
- role_refs = self.assignment_api.get_roles_for_groups(
- group_id_list, project_id=project1['id'])
-
- self.assertThat(role_refs, matchers.HasLength(1))
- self.assertIn(role_list[2], role_refs)
-
- # With inheritance on, we should also get back the inherited role from
- # its owning domain.
-
- self.config_fixture.config(group='os_inherit', enabled=True)
- role_refs = self.assignment_api.get_roles_for_groups(
- group_id_list, project_id=project1['id'])
-
- self.assertThat(role_refs, matchers.HasLength(2))
- self.assertIn(role_list[1], role_refs)
- self.assertIn(role_list[2], role_refs)
-
def test_list_domains_for_groups(self):
    """Test retrieving domains for a list of groups.

    Only the domains carrying a non-inherited group role assignment
    should be returned by list_domains_for_groups; a domain whose only
    grant is inherited must be excluded.

    """
    domains = []
    groups = []
    group_ids = []
    for _ in range(3):
        domain = unit.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        domains.append(domain)

        group = self.identity_api.create_group(
            unit.new_group_ref(domain_id=domain['id']))
        groups.append(group)
        group_ids.append(group['id'])

    role1 = unit.new_role_ref()
    self.role_api.create_role(role1['id'], role1)

    # Non-inherited grants on the first two domains...
    for index in range(2):
        self.assignment_api.create_grant(group_id=groups[index]['id'],
                                         domain_id=domains[index]['id'],
                                         role_id=role1['id'])
    # ...and an inherited grant on the third.
    self.assignment_api.create_grant(group_id=groups[2]['id'],
                                     domain_id=domains[2]['id'],
                                     role_id=role1['id'],
                                     inherited_to_projects=True)

    # domains[2] must not appear since its only grant is inherited.
    domain_refs = self.assignment_api.list_domains_for_groups(group_ids)

    self.assertThat(domain_refs, matchers.HasLength(2))
    self.assertIn(domains[0], domain_refs)
    self.assertIn(domains[1], domain_refs)
-
def test_list_projects_for_groups(self):
    """Test retrieving projects for a list of groups.

    Project1 and Project2 hold direct group roles, while Project3 has
    no direct role and only inherits a group role from Domain1.
    Domain2/Project4 act as spoilers. With os_inherit disabled only the
    directly assigned projects come back; enabling it adds Project3 via
    the inherited role on its owning domain.

    """
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    domain2 = unit.new_domain_ref()
    self.resource_api.create_domain(domain2['id'], domain2)

    def _create_project(domain_ref):
        # Build and persist a project owned by the given domain.
        project = unit.new_project_ref(domain_id=domain_ref['id'])
        return self.resource_api.create_project(project['id'], project)

    project1 = _create_project(domain1)
    project2 = _create_project(domain1)
    project3 = _create_project(domain1)
    project4 = _create_project(domain2)

    group_list = []
    role_list = []
    for _ in range(7):
        group = unit.new_group_ref(domain_id=domain1['id'])
        group_list.append(self.identity_api.create_group(group))

        role = unit.new_role_ref()
        self.role_api.create_role(role['id'], role)
        role_list.append(role)

    # Domain1: one non-inherited and one inherited grant, plus a
    # direct grant each on Project1 and Project2.
    self.assignment_api.create_grant(group_id=group_list[0]['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[0]['id'])
    self.assignment_api.create_grant(group_id=group_list[1]['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[1]['id'],
                                     inherited_to_projects=True)
    self.assignment_api.create_grant(group_id=group_list[2]['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[2]['id'])
    self.assignment_api.create_grant(group_id=group_list[3]['id'],
                                     project_id=project2['id'],
                                     role_id=role_list[3]['id'])

    # Spoiler assignments on Domain2/Project4.
    self.assignment_api.create_grant(group_id=group_list[4]['id'],
                                     domain_id=domain2['id'],
                                     role_id=role_list[4]['id'])
    self.assignment_api.create_grant(group_id=group_list[5]['id'],
                                     domain_id=domain2['id'],
                                     role_id=role_list[5]['id'],
                                     inherited_to_projects=True)
    self.assignment_api.create_grant(group_id=group_list[6]['id'],
                                     project_id=project4['id'],
                                     role_id=role_list[6]['id'])

    # Query using the groups that have roles on Project1, Project2 and
    # the inherited role on Domain1.
    group_id_list = [group_list[1]['id'], group_list[2]['id'],
                     group_list[3]['id']]

    # Inheritance off: only the directly assigned projects show up.
    self.config_fixture.config(group='os_inherit', enabled=False)
    project_refs = (
        self.assignment_api.list_projects_for_groups(group_id_list))

    self.assertThat(project_refs, matchers.HasLength(2))
    self.assertIn(project1, project_refs)
    self.assertIn(project2, project_refs)

    # Inheritance on: Project3 is added because of the inherited role
    # on its owning domain.
    self.config_fixture.config(group='os_inherit', enabled=True)
    project_refs = (
        self.assignment_api.list_projects_for_groups(group_id_list))

    self.assertThat(project_refs, matchers.HasLength(3))
    self.assertIn(project1, project_refs)
    self.assertIn(project2, project_refs)
    self.assertIn(project3, project_refs)
-
def test_update_role_no_name(self):
    """Update a role without supplying its name."""
    # 'description' is simply an attribute other than 'name'; the call
    # must complete without raising.
    new_description = uuid.uuid4().hex
    self.role_api.update_role(self.role_member['id'],
                              {'description': new_description})
-
def test_update_role_same_name(self):
    """Update a role passing a name identical to the current one."""
    unchanged_name = self.role_member['name']
    # Supplying the existing name must not raise.
    self.role_api.update_role(self.role_member['id'],
                              {'name': unchanged_name})
-
def test_list_role_assignment_containing_names(self):
    """Check entity names come back when include_names is requested."""
    # Build and persist the entities involved in the assignments.
    new_domain = self._get_domain_fixture()
    new_role = unit.new_role_ref()
    new_role = self.role_api.create_role(new_role['id'], new_role)
    new_user = self.identity_api.create_user(
        unit.new_user_ref(domain_id=new_domain['id']))
    new_group = self.identity_api.create_group(
        unit.new_group_ref(domain_id=new_domain['id']))
    new_project = unit.new_project_ref(domain_id=new_domain['id'])
    self.resource_api.create_project(new_project['id'], new_project)

    # One assignment per flavor: user/project, group/project and
    # user/domain.
    self.assignment_api.create_grant(user_id=new_user['id'],
                                     project_id=new_project['id'],
                                     role_id=new_role['id'])
    self.assignment_api.create_grant(group_id=new_group['id'],
                                     project_id=new_project['id'],
                                     role_id=new_role['id'])
    self.assignment_api.create_grant(domain_id=new_domain['id'],
                                     user_id=new_user['id'],
                                     role_id=new_role['id'])

    # Fetch each assignment back with the include_names flag set.
    _asgmt_prj = self.assignment_api.list_role_assignments(
        user_id=new_user['id'],
        project_id=new_project['id'],
        include_names=True)
    _asgmt_grp = self.assignment_api.list_role_assignments(
        group_id=new_group['id'],
        project_id=new_project['id'],
        include_names=True)
    _asgmt_dmn = self.assignment_api.list_role_assignments(
        domain_id=new_domain['id'],
        user_id=new_user['id'],
        include_names=True)

    # Exactly one assignment of each flavor must exist.
    self.assertThat(_asgmt_prj, matchers.HasLength(1))
    self.assertThat(_asgmt_grp, matchers.HasLength(1))
    self.assertThat(_asgmt_dmn, matchers.HasLength(1))

    first_asgmt_prj = _asgmt_prj[0]
    first_asgmt_grp = _asgmt_grp[0]
    first_asgmt_dmn = _asgmt_dmn[0]

    # The user/project assignment carries project, user and role names.
    expected_prj = {
        'project_name': new_project['name'],
        'project_domain_id': new_project['domain_id'],
        'user_name': new_user['name'],
        'user_domain_id': new_user['domain_id'],
        'role_name': new_role['name'],
    }
    for key, value in expected_prj.items():
        self.assertEqual(value, first_asgmt_prj[key])

    # The group/project assignment carries group, project and role
    # names.
    expected_grp = {
        'group_name': new_group['name'],
        'group_domain_id': new_group['domain_id'],
        'project_name': new_project['name'],
        'project_domain_id': new_project['domain_id'],
        'role_name': new_role['name'],
    }
    for key, value in expected_grp.items():
        self.assertEqual(value, first_asgmt_grp[key])

    # The user/domain assignment carries domain, user and role names.
    expected_dmn = {
        'domain_name': new_domain['name'],
        'user_name': new_user['name'],
        'user_domain_id': new_user['domain_id'],
        'role_name': new_role['name'],
    }
    for key, value in expected_dmn.items():
        self.assertEqual(value, first_asgmt_dmn[key])
-
def test_list_role_assignment_does_not_contain_names(self):
    """Test names are not included with list role assignments.

    Scenario:
    - names are NOT included by default
    - names are NOT included when include_names=False

    """
    name_keys = ('project_name', 'project_domain_id', 'user_name',
                 'user_domain_id', 'role_name')

    def assert_does_not_contain_names(assignments):
        # None of the name keys may be present on the first assignment.
        first = assignments[0]
        for key in name_keys:
            self.assertNotIn(key, first)

    # Build and persist the entities for a single user/project grant.
    new_domain = self._get_domain_fixture()
    new_role = unit.new_role_ref()
    new_role = self.role_api.create_role(new_role['id'], new_role)
    new_user = self.identity_api.create_user(
        unit.new_user_ref(domain_id=new_domain['id']))
    new_project = unit.new_project_ref(domain_id=new_domain['id'])
    self.resource_api.create_project(new_project['id'], new_project)
    self.assignment_api.create_grant(user_id=new_user['id'],
                                     project_id=new_project['id'],
                                     role_id=new_role['id'])

    # Default call: no names included.
    assert_does_not_contain_names(
        self.assignment_api.list_role_assignments(
            user_id=new_user['id'],
            project_id=new_project['id']))
    # Explicit include_names=False: still no names.
    assert_does_not_contain_names(
        self.assignment_api.list_role_assignments(
            user_id=new_user['id'],
            project_id=new_project['id'],
            include_names=False))
-
def test_delete_user_assignments_user_same_id_as_group(self):
    """Test deleting user assignments when user_id == group_id.

    Only the user assignments (USER_DOMAIN and USER_PROJECT) may be
    removed; the group assignments that share the same actor ID must
    survive untouched.

    """
    common_id = uuid.uuid4().hex

    project = unit.new_project_ref(
        domain_id=CONF.identity.default_domain_id)
    project = self.resource_api.create_project(project['id'], project)

    # Create a user and a group sharing the same ID; the drivers are
    # called directly so the ID can be forced.
    user = self.identity_api.driver.create_user(
        common_id,
        unit.new_user_ref(id=common_id,
                          domain_id=CONF.identity.default_domain_id))
    self.assertEqual(common_id, user['id'])
    group = self.identity_api.driver.create_group(
        common_id,
        unit.new_group_ref(id=common_id,
                           domain_id=CONF.identity.default_domain_id))
    self.assertEqual(common_id, group['id'])

    roles = []
    for _ in range(4):
        role = unit.new_role_ref()
        roles.append(self.role_api.create_role(role['id'], role))

    # Two grants for the user (domain + project)...
    self.assignment_api.driver.create_grant(
        user_id=user['id'], domain_id=CONF.identity.default_domain_id,
        role_id=roles[0]['id'])
    self.assignment_api.driver.create_grant(
        user_id=user['id'], project_id=project['id'],
        role_id=roles[1]['id'])
    # ...and two for the group (domain + project).
    self.assignment_api.driver.create_grant(
        group_id=group['id'], domain_id=CONF.identity.default_domain_id,
        role_id=roles[2]['id'])
    self.assignment_api.driver.create_grant(
        group_id=group['id'], project_id=project['id'],
        role_id=roles[3]['id'])

    # Sanity check: both actors hold two assignments each.
    user_assignments = self.assignment_api.list_role_assignments(
        user_id=user['id'])
    self.assertThat(user_assignments, matchers.HasLength(2))
    group_assignments = self.assignment_api.list_role_assignments(
        group_id=group['id'])
    self.assertThat(group_assignments, matchers.HasLength(2))

    self.assignment_api.delete_user_assignments(user_id=user['id'])

    # Only the user assignments are gone; the group's remain.
    user_assignments = self.assignment_api.list_role_assignments(
        user_id=user['id'])
    self.assertThat(user_assignments, matchers.HasLength(0))
    group_assignments = self.assignment_api.list_role_assignments(
        group_id=group['id'])
    self.assertThat(group_assignments, matchers.HasLength(2))
    # The survivors must all be group-related.
    for assignment in group_assignments:
        self.assertThat(assignment.keys(), matchers.Contains('group_id'))
-
def test_delete_group_assignments_group_same_id_as_user(self):
    """Test deleting group assignments when group_id == user_id.

    In this scenario, only group assignments must be deleted (i.e.
    GROUP_DOMAIN or GROUP_PROJECT).

    Test plan:
    * Create a group and a user with the same ID;
    * Create four roles and assign them to both group and user;
    * Delete all group assignments;
    * User assignments must stay intact.
    """
    # Create a common ID
    common_id = uuid.uuid4().hex
    # Create a project
    project = unit.new_project_ref(
        domain_id=CONF.identity.default_domain_id)
    project = self.resource_api.create_project(project['id'], project)
    # Create a user (driver level, so the ID can be forced)
    user = unit.new_user_ref(id=common_id,
                             domain_id=CONF.identity.default_domain_id)
    user = self.identity_api.driver.create_user(common_id, user)
    self.assertEqual(common_id, user['id'])
    # Create a group with the same ID
    group = unit.new_group_ref(id=common_id,
                               domain_id=CONF.identity.default_domain_id)
    group = self.identity_api.driver.create_group(common_id, group)
    self.assertEqual(common_id, group['id'])
    # Create four roles
    roles = []
    for _ in range(4):
        role = unit.new_role_ref()
        roles.append(self.role_api.create_role(role['id'], role))
    # Assign roles for user
    self.assignment_api.driver.create_grant(
        user_id=user['id'], domain_id=CONF.identity.default_domain_id,
        role_id=roles[0]['id'])
    self.assignment_api.driver.create_grant(user_id=user['id'],
                                            project_id=project['id'],
                                            role_id=roles[1]['id'])
    # Assign roles for group
    self.assignment_api.driver.create_grant(
        group_id=group['id'], domain_id=CONF.identity.default_domain_id,
        role_id=roles[2]['id'])
    self.assignment_api.driver.create_grant(group_id=group['id'],
                                            project_id=project['id'],
                                            role_id=roles[3]['id'])
    # Make sure they were assigned
    user_assignments = self.assignment_api.list_role_assignments(
        user_id=user['id'])
    self.assertThat(user_assignments, matchers.HasLength(2))
    group_assignments = self.assignment_api.list_role_assignments(
        group_id=group['id'])
    self.assertThat(group_assignments, matchers.HasLength(2))
    # Delete group assignments
    self.assignment_api.delete_group_assignments(group_id=group['id'])
    # Assert only group assignments were deleted
    group_assignments = self.assignment_api.list_role_assignments(
        group_id=group['id'])
    self.assertThat(group_assignments, matchers.HasLength(0))
    user_assignments = self.assignment_api.list_role_assignments(
        user_id=user['id'])
    self.assertThat(user_assignments, matchers.HasLength(2))
    # Make sure the remaining assignments are user-related.
    # NOTE: this previously looped over group_assignments, which was
    # just asserted to be empty, so the check never actually ran.
    for assignment in user_assignments:
        self.assertThat(assignment.keys(), matchers.Contains('user_id'))
-
def test_remove_foreign_assignments_when_deleting_a_domain(self):
    """Deleting a domain removes only the assignments made on it.

    A user and a group from the default domain get a role on two new
    domains; deleting one of those domains must remove exactly the
    assignments on that domain and leave the other domain's intact.

    """
    group = self.identity_api.create_group(
        unit.new_group_ref(domain_id=CONF.identity.default_domain_id))

    role = unit.new_role_ref()
    role = self.role_api.create_role(role['id'], role)

    new_domains = [unit.new_domain_ref(), unit.new_domain_ref()]
    for new_domain in new_domains:
        self.resource_api.create_domain(new_domain['id'], new_domain)

        self.assignment_api.create_grant(group_id=group['id'],
                                         domain_id=new_domain['id'],
                                         role_id=role['id'])
        self.assignment_api.create_grant(user_id=self.user_two['id'],
                                         domain_id=new_domain['id'],
                                         role_id=role['id'])

    def _role_assignments():
        # All assignments for the role under test.
        return self.assignment_api.list_role_assignments(
            role_id=role['id'])

    # Two assignments per new domain.
    self.assertThat(_role_assignments(), matchers.HasLength(4))

    # Dropping the first new domain leaves the second domain's pair.
    self.resource_api.update_domain(new_domains[0]['id'],
                                    {'enabled': False})
    self.resource_api.delete_domain(new_domains[0]['id'])
    self.assertThat(_role_assignments(), matchers.HasLength(2))

    # Dropping the second leaves nothing.
    self.resource_api.update_domain(new_domains[1]['id'],
                                    {'enabled': False})
    self.resource_api.delete_domain(new_domains[1]['id'])
    self.assertEqual([], _role_assignments())
-
-
-class InheritanceTests(AssignmentTestHelperMixin):
-
def test_role_assignments_user_domain_to_project_inheritance(self):
    """Inherited domain roles surface as project roles in effective mode."""
    self.config_fixture.config(group='os_inherit', enabled=True)
    plan = {
        'entities': {'domains': {'users': 2, 'projects': 1},
                     'roles': 3},
        'assignments': [{'user': 0, 'role': 0, 'domain': 0},
                        {'user': 0, 'role': 1, 'project': 0},
                        {'user': 0, 'role': 2, 'domain': 0,
                         'inherited_to_projects': True},
                        {'user': 1, 'role': 1, 'project': 0}],
        'tests': [
            # Direct assignments for user[0] come back as stored, with
            # the inherited grant flagged as 'projects'.
            {'params': {'user': 0},
             'results': [{'user': 0, 'role': 0, 'domain': 0},
                         {'user': 0, 'role': 1, 'project': 0},
                         {'user': 0, 'role': 2, 'domain': 0,
                          'inherited_to_projects': 'projects'}]},
            # In effective mode the inherited domain role turns into a
            # project role.
            {'params': {'user': 0, 'effective': True},
             'results': [{'user': 0, 'role': 0, 'domain': 0},
                         {'user': 0, 'role': 1, 'project': 0},
                         {'user': 0, 'role': 2, 'project': 0,
                          'indirect': {'domain': 0}}]},
            # Narrowing to project[0] keeps the direct project role and
            # the inherited one.
            {'params': {'user': 0, 'project': 0, 'effective': True},
             'results': [{'user': 0, 'role': 1, 'project': 0},
                         {'user': 0, 'role': 2, 'project': 0,
                          'indirect': {'domain': 0}}]}
        ]
    }
    self.execute_assignment_plan(plan)
-
def test_inherited_role_assignments_excluded_if_os_inherit_false(self):
    """Inherited assignments are dropped when os_inherit is disabled."""
    self.config_fixture.config(group='os_inherit', enabled=False)
    plan = {
        'entities': {'domains': {'users': 2, 'groups': 1, 'projects': 1},
                     'roles': 4},
        'group_memberships': [{'group': 0, 'users': [0]}],
        'assignments': [{'user': 0, 'role': 0, 'domain': 0},
                        {'user': 0, 'role': 1, 'project': 0},
                        {'user': 0, 'role': 2, 'domain': 0,
                         'inherited_to_projects': True},
                        {'user': 1, 'role': 1, 'project': 0},
                        {'group': 0, 'role': 3, 'project': 0}],
        'tests': [
            # Direct listing for user[0] omits the inherited role since
            # os-inherit is disabled.
            {'params': {'user': 0},
             'results': [{'user': 0, 'role': 0, 'domain': 0},
                         {'user': 0, 'role': 1, 'project': 0}]},
            # Effective mode also excludes inherited roles, but group
            # expansion is not part of os-inherit, so the group role
            # still surfaces as a user role.
            {'params': {'user': 0, 'effective': True},
             'results': [{'user': 0, 'role': 0, 'domain': 0},
                         {'user': 0, 'role': 1, 'project': 0},
                         {'user': 0, 'role': 3, 'project': 0,
                          'indirect': {'group': 0}}]},
        ]
    }
    self.execute_assignment_plan(plan)
-
def _test_crud_inherited_and_direct_assignment(self, **kwargs):
    """Verify CRUD of an inherited plus a direct assignment.

    It must be possible to create both an inherited and a direct role
    assignment for the same actor on the same target. The actor is
    passed as 'user_id' or 'group_id' and the target as 'project_id'
    or 'domain_id' in the kwargs.

    """
    self.config_fixture.config(group='os_inherit', enabled=True)
    # A fresh role avoids clashing with assignments loaded from the
    # default fixtures.
    role = unit.new_role_ref()
    role = self.role_api.create_role(role['id'], role)

    # Common actor/target/role triple shared by both assignments.
    assignment_entity = dict(role_id=role['id'], **kwargs)

    direct_assignment_entity = dict(assignment_entity)
    inherited_assignment_entity = dict(assignment_entity,
                                       inherited_to_projects='projects')

    def _grants():
        return self.assignment_api.list_role_assignments(
            role_id=role['id'])

    # Only the direct assignment exists at first.
    self.assignment_api.create_grant(inherited_to_projects=False,
                                     **assignment_entity)
    grants = _grants()
    self.assertThat(grants, matchers.HasLength(1))
    self.assertIn(direct_assignment_entity, grants)

    # Direct and inherited assignments can co-exist.
    self.assignment_api.create_grant(inherited_to_projects=True,
                                     **assignment_entity)
    grants = _grants()
    self.assertThat(grants, matchers.HasLength(2))
    self.assertIn(direct_assignment_entity, grants)
    self.assertIn(inherited_assignment_entity, grants)

    # Both can be deleted independently, leaving nothing behind.
    self.assignment_api.delete_grant(inherited_to_projects=False,
                                     **assignment_entity)
    self.assignment_api.delete_grant(inherited_to_projects=True,
                                     **assignment_entity)
    self.assertEqual([], _grants())
-
def test_crud_inherited_and_direct_assignment_for_user_on_domain(self):
    # Exercise the user-on-domain actor/target combination.
    domain_id = CONF.identity.default_domain_id
    self._test_crud_inherited_and_direct_assignment(
        user_id=self.user_foo['id'], domain_id=domain_id)
-
def test_crud_inherited_and_direct_assignment_for_group_on_domain(self):
    # Exercise the group-on-domain actor/target combination.
    domain_id = CONF.identity.default_domain_id
    group = self.identity_api.create_group(
        unit.new_group_ref(domain_id=domain_id))
    self._test_crud_inherited_and_direct_assignment(
        group_id=group['id'], domain_id=domain_id)
-
def test_crud_inherited_and_direct_assignment_for_user_on_project(self):
    # Exercise the user-on-project actor/target combination.
    self._test_crud_inherited_and_direct_assignment(
        user_id=self.user_foo['id'],
        project_id=self.tenant_baz['id'])
-
def test_crud_inherited_and_direct_assignment_for_group_on_project(self):
    # Exercise the group-on-project actor/target combination.
    group = self.identity_api.create_group(
        unit.new_group_ref(domain_id=CONF.identity.default_domain_id))
    self._test_crud_inherited_and_direct_assignment(
        group_id=group['id'], project_id=self.tenant_baz['id'])
-
def test_inherited_role_grants_for_user(self):
    """Test inherited user roles.

    Assign a direct role on a project plus a non-inherited and an
    inherited role on the owning domain. The effective roles on the
    project must contain the direct role and, once created, the
    inherited domain role; the inherited role must never appear as a
    directly assigned role on the domain itself.

    """
    self.config_fixture.config(group='os_inherit', enabled=True)
    role_list = [unit.new_role_ref() for _ in range(3)]
    for role in role_list:
        self.role_api.create_role(role['id'], role)
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    user1 = self.identity_api.create_user(
        unit.new_user_ref(domain_id=domain1['id']))
    project1 = unit.new_project_ref(domain_id=domain1['id'])
    self.resource_api.create_project(project1['id'], project1)

    # No roles exist yet for the user on the project.
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        project_id=project1['id'])
    self.assertEqual(0, len(roles_ref))

    # Direct role on the project; non-inherited role on the domain.
    self.assignment_api.create_grant(user_id=user1['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[0]['id'])
    self.assignment_api.create_grant(user_id=user1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[1]['id'])

    # Effective roles on the project contain only the direct role.
    combined_list = self.assignment_api.get_roles_for_user_and_project(
        user1['id'], project1['id'])
    self.assertEqual(1, len(combined_list))
    self.assertIn(role_list[0]['id'], combined_list)

    # Add an inherited role on the domain...
    self.assignment_api.create_grant(user_id=user1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[2]['id'],
                                     inherited_to_projects=True)

    # ...which now also appears in the project's effective roles.
    combined_list = self.assignment_api.get_roles_for_user_and_project(
        user1['id'], project1['id'])
    self.assertEqual(2, len(combined_list))
    self.assertIn(role_list[0]['id'], combined_list)
    self.assertIn(role_list[2]['id'], combined_list)

    # The inherited role must not be reported as a directly assigned
    # role on the domain itself.
    combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
        user1['id'], domain1['id'])
    self.assertEqual(1, len(combined_role_list))
    self.assertIn(role_list[1]['id'], combined_role_list)

    # TODO(henry-nash): The manual checks above use
    # get_roles_for_user_and_project/domain, which will, in a
    # subsequent patch, be re-implemented on top of
    # list_role_assignments (see blueprint remove-role-metadata). The
    # plan below mirrors them so list_role_assignments is proven to
    # give the same answers; once that re-implementation lands, the
    # manual checks can be collapsed into the plan.
    test_plan = {
        # A domain with a user & project, plus 3 roles.
        'entities': {'domains': {'users': 1, 'projects': 1},
                     'roles': 3},
        'assignments': [{'user': 0, 'role': 0, 'project': 0},
                        {'user': 0, 'role': 1, 'domain': 0},
                        {'user': 0, 'role': 2, 'domain': 0,
                         'inherited_to_projects': True}],
        'tests': [
            # Effective assignments on project[0]: one direct role and
            # one inherited role.
            {'params': {'user': 0, 'project': 0, 'effective': True},
             'results': [{'user': 0, 'role': 0, 'project': 0},
                         {'user': 0, 'role': 2, 'project': 0,
                          'indirect': {'domain': 0}}]},
            # Effective mode on the domain excludes the inherited role.
            {'params': {'user': 0, 'domain': 0, 'effective': True},
             'results': [{'user': 0, 'role': 1, 'domain': 0}]},
            # So does asking only for non-inherited assignments.
            {'params': {'user': 0, 'domain': 0, 'inherited': False},
             'results': [{'user': 0, 'role': 1, 'domain': 0}]},
        ]
    }
    self.execute_assignment_plan(test_plan)
-
def test_inherited_role_grants_for_group(self):
    """Test inherited group roles.

    With a user who belongs to two groups, assign a direct user role on
    a project, a non-inherited group role on the domain and two
    inherited group roles on the domain. The effective roles on the
    project must be the direct role plus the two inherited group roles.

    """
    self.config_fixture.config(group='os_inherit', enabled=True)
    role_list = [unit.new_role_ref() for _ in range(4)]
    for role in role_list:
        self.role_api.create_role(role['id'], role)
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    user1 = self.identity_api.create_user(
        unit.new_user_ref(domain_id=domain1['id']))
    group1 = self.identity_api.create_group(
        unit.new_group_ref(domain_id=domain1['id']))
    group2 = self.identity_api.create_group(
        unit.new_group_ref(domain_id=domain1['id']))
    project1 = unit.new_project_ref(domain_id=domain1['id'])
    self.resource_api.create_project(project1['id'], project1)

    # The user belongs to both groups.
    for group in (group1, group2):
        self.identity_api.add_user_to_group(user1['id'], group['id'])

    # No roles exist yet for the user on the project.
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        project_id=project1['id'])
    self.assertEqual(0, len(roles_ref))

    # Direct user role on the project; non-inherited group role on the
    # domain.
    self.assignment_api.create_grant(user_id=user1['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[0]['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[1]['id'])

    # Effective roles on the project contain only the direct role.
    combined_list = self.assignment_api.get_roles_for_user_and_project(
        user1['id'], project1['id'])
    self.assertEqual(1, len(combined_list))
    self.assertIn(role_list[0]['id'], combined_list)

    # Add two more group roles, both inherited, on the domain...
    self.assignment_api.create_grant(group_id=group2['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[2]['id'],
                                     inherited_to_projects=True)
    self.assignment_api.create_grant(group_id=group2['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[3]['id'],
                                     inherited_to_projects=True)

    # ...which now also appear in the project's effective roles.
    combined_list = self.assignment_api.get_roles_for_user_and_project(
        user1['id'], project1['id'])
    self.assertEqual(3, len(combined_list))
    self.assertIn(role_list[0]['id'], combined_list)
    self.assertIn(role_list[2]['id'], combined_list)
    self.assertIn(role_list[3]['id'], combined_list)

    # TODO(henry-nash): get_roles_for_user_and_project will, in a
    # subsequent patch, be re-implemented on top of
    # list_role_assignments (see blueprint remove-role-metadata). The
    # plan below mirrors the checks above so list_role_assignments is
    # proven to give the same answers; once that re-implementation
    # lands, the manual checks can be collapsed into the plan.
    test_plan = {
        # A domain with a user and project, 2 groups, plus 4 roles.
        'entities': {'domains': {'users': 1, 'projects': 1, 'groups': 2},
                     'roles': 4},
        'group_memberships': [{'group': 0, 'users': [0]},
                              {'group': 1, 'users': [0]}],
        'assignments': [{'user': 0, 'role': 0, 'project': 0},
                        {'group': 0, 'role': 1, 'domain': 0},
                        {'group': 1, 'role': 2, 'domain': 0,
                         'inherited_to_projects': True},
                        {'group': 1, 'role': 3, 'domain': 0,
                         'inherited_to_projects': True}],
        'tests': [
            # Effective assignments for user[0] on project[0]: the
            # direct role plus both inherited roles, but not the
            # direct domain role held via group[0].
            {'params': {'user': 0, 'project': 0, 'effective': True},
             'results': [{'user': 0, 'role': 0, 'project': 0},
                         {'user': 0, 'role': 2, 'project': 0,
                          'indirect': {'domain': 0, 'group': 1}},
                         {'user': 0, 'role': 3, 'project': 0,
                          'indirect': {'domain': 0, 'group': 1}}]}
        ]
    }
    self.execute_assignment_plan(test_plan)
-
- def test_list_projects_for_user_with_inherited_grants(self):
- """Test inherited user roles.
-
- Test Plan:
-
- - Enable OS-INHERIT extension
- - Create a domain, with two projects and a user
- - Assign an inherited user role on the domain, as well as a direct
- user role to a separate project in a different domain
- - Get a list of projects for user, should return all three projects
-
- """
- self.config_fixture.config(group='os_inherit', enabled=True)
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user1 = unit.new_user_ref(domain_id=domain['id'])
- user1 = self.identity_api.create_user(user1)
- project1 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project2['id'], project2)
-
- # Create 2 grants, one on a project and one inherited grant
- # on the domain
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain['id'],
- role_id=self.role_admin['id'],
- inherited_to_projects=True)
- # Should get back all three projects, one by virtue of the direct
- # grant, plus both projects in the domain
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertEqual(3, len(user_projects))
-
- # TODO(henry-nash): The test above uses list_projects_for_user
- # which may, in a subsequent patch, be re-implemented to call
- # list_role_assignments and then report only the distinct projects.
- #
- # The test plan below therefore mirrors this test, to ensure that
- # list_role_assignments works the same. Once list_projects_for_user
- # has been re-implemented then the manual tests above can be
- # refactored.
- test_plan = {
- # A domain with 1 project, plus a second domain with 2 projects,
- # as well as a user. Also, create 2 roles.
- 'entities': {'domains': [{'projects': 1},
- {'users': 1, 'projects': 2}],
- 'roles': 2},
- 'assignments': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 1, 'domain': 1,
- 'inherited_to_projects': True}],
- 'tests': [
- # List all effective assignments for user[0]
- # Should get one direct role plus one inherited role for each
- # project in domain
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 1, 'project': 1,
- 'indirect': {'domain': 1}},
- {'user': 0, 'role': 1, 'project': 2,
- 'indirect': {'domain': 1}}]}
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_list_projects_for_user_with_inherited_user_project_grants(self):
- """Test inherited role assignments for users on nested projects.
-
- Test Plan:
-
- - Enable OS-INHERIT extension
- - Create a hierarchy of projects with one root and one leaf project
- - Assign an inherited user role on root project
- - Assign a non-inherited user role on root project
- - Get a list of projects for user, should return both projects
- - Disable OS-INHERIT extension
- - Get a list of projects for user, should return only root project
-
- """
- # Enable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=True)
- root_project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- root_project = self.resource_api.create_project(root_project['id'],
- root_project)
- leaf_project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id,
- parent_id=root_project['id'])
- leaf_project = self.resource_api.create_project(leaf_project['id'],
- leaf_project)
-
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
-
- # Grant inherited user role
- self.assignment_api.create_grant(user_id=user['id'],
- project_id=root_project['id'],
- role_id=self.role_admin['id'],
- inherited_to_projects=True)
- # Grant non-inherited user role
- self.assignment_api.create_grant(user_id=user['id'],
- project_id=root_project['id'],
- role_id=self.role_member['id'])
- # Should get back both projects: because the direct role assignment for
- # the root project and inherited role assignment for leaf project
- user_projects = self.assignment_api.list_projects_for_user(user['id'])
- self.assertEqual(2, len(user_projects))
- self.assertIn(root_project, user_projects)
- self.assertIn(leaf_project, user_projects)
-
- # Disable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=False)
- # Should get back just root project - due the direct role assignment
- user_projects = self.assignment_api.list_projects_for_user(user['id'])
- self.assertEqual(1, len(user_projects))
- self.assertIn(root_project, user_projects)
-
- # TODO(henry-nash): The test above uses list_projects_for_user
- # which may, in a subsequent patch, be re-implemented to call
- # list_role_assignments and then report only the distinct projects.
- #
- # The test plan below therefore mirrors this test, to ensure that
- # list_role_assignments works the same. Once list_projects_for_user
- # has been re-implemented then the manual tests above can be
- # refactored.
- test_plan = {
- # A domain with a project and sub-project, plus a user.
- # Also, create 2 roles.
- 'entities': {
- 'domains': {'id': CONF.identity.default_domain_id, 'users': 1,
- 'projects': {'project': 1}},
- 'roles': 2},
- # A direct role and an inherited role on the parent
- 'assignments': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 1, 'project': 0,
- 'inherited_to_projects': True}],
- 'tests': [
- # List all effective assignments for user[0] - should get back
- # one direct role plus one inherited role.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 1, 'project': 1,
- 'indirect': {'project': 0}}]}
- ]
- }
-
- test_plan_with_os_inherit_disabled = {
- 'tests': [
- # List all effective assignments for user[0] - should only get
- # back the one direct role.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0}]}
- ]
- }
- self.config_fixture.config(group='os_inherit', enabled=True)
- test_data = self.execute_assignment_plan(test_plan)
- self.config_fixture.config(group='os_inherit', enabled=False)
- # Pass the existing test data in to allow execution of 2nd test plan
- self.execute_assignment_cases(
- test_plan_with_os_inherit_disabled, test_data)
-
- def test_list_projects_for_user_with_inherited_group_grants(self):
- """Test inherited group roles.
-
- Test Plan:
-
- - Enable OS-INHERIT extension
- - Create two domains, each with two projects
- - Create a user and group
- - Make the user a member of the group
- - Assign a user role two projects, an inherited
- group role to one domain and an inherited regular role on
- the other domain
- - Get a list of projects for user, should return both pairs of projects
- from the domain, plus the one separate project
-
- """
- self.config_fixture.config(group='os_inherit', enabled=True)
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- project1 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project2['id'], project2)
- project3 = unit.new_project_ref(domain_id=domain2['id'])
- self.resource_api.create_project(project3['id'], project3)
- project4 = unit.new_project_ref(domain_id=domain2['id'])
- self.resource_api.create_project(project4['id'], project4)
- user1 = unit.new_user_ref(domain_id=domain['id'])
- user1 = self.identity_api.create_user(user1)
- group1 = unit.new_group_ref(domain_id=domain['id'])
- group1 = self.identity_api.create_group(group1)
- self.identity_api.add_user_to_group(user1['id'], group1['id'])
-
- # Create 4 grants:
- # - one user grant on a project in domain2
- # - one user grant on a project in the default domain
- # - one inherited user grant on domain
- # - one inherited group grant on domain2
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project3['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain['id'],
- role_id=self.role_admin['id'],
- inherited_to_projects=True)
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain2['id'],
- role_id=self.role_admin['id'],
- inherited_to_projects=True)
- # Should get back all five projects, but without a duplicate for
- # project3 (since it has both a direct user role and an inherited role)
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertEqual(5, len(user_projects))
-
- # TODO(henry-nash): The test above uses list_projects_for_user
- # which may, in a subsequent patch, be re-implemented to call
- # list_role_assignments and then report only the distinct projects.
- #
- # The test plan below therefore mirrors this test, to ensure that
- # list_role_assignments works the same. Once list_projects_for_user
- # has been re-implemented then the manual tests above can be
- # refactored.
- test_plan = {
- # A domain with a 1 project, plus a second domain with 2 projects,
- # as well as a user & group and a 3rd domain with 2 projects.
- # Also, created 2 roles.
- 'entities': {'domains': [{'projects': 1},
- {'users': 1, 'groups': 1, 'projects': 2},
- {'projects': 2}],
- 'roles': 2},
- 'group_memberships': [{'group': 0, 'users': [0]}],
- 'assignments': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 0, 'project': 3},
- {'user': 0, 'role': 1, 'domain': 1,
- 'inherited_to_projects': True},
- {'user': 0, 'role': 1, 'domain': 2,
- 'inherited_to_projects': True}],
- 'tests': [
- # List all effective assignments for user[0]
- # Should get back both direct roles plus roles on both projects
- # from each domain. Duplicates should not be filtered out.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 3},
- {'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 1, 'project': 1,
- 'indirect': {'domain': 1}},
- {'user': 0, 'role': 1, 'project': 2,
- 'indirect': {'domain': 1}},
- {'user': 0, 'role': 1, 'project': 3,
- 'indirect': {'domain': 2}},
- {'user': 0, 'role': 1, 'project': 4,
- 'indirect': {'domain': 2}}]}
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_list_projects_for_user_with_inherited_group_project_grants(self):
- """Test inherited role assignments for groups on nested projects.
-
- Test Plan:
-
- - Enable OS-INHERIT extension
- - Create a hierarchy of projects with one root and one leaf project
- - Assign an inherited group role on root project
- - Assign a non-inherited group role on root project
- - Get a list of projects for user, should return both projects
- - Disable OS-INHERIT extension
- - Get a list of projects for user, should return only root project
-
- """
- self.config_fixture.config(group='os_inherit', enabled=True)
- root_project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- root_project = self.resource_api.create_project(root_project['id'],
- root_project)
- leaf_project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id,
- parent_id=root_project['id'])
- leaf_project = self.resource_api.create_project(leaf_project['id'],
- leaf_project)
-
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
-
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
- self.identity_api.add_user_to_group(user['id'], group['id'])
-
- # Grant inherited group role
- self.assignment_api.create_grant(group_id=group['id'],
- project_id=root_project['id'],
- role_id=self.role_admin['id'],
- inherited_to_projects=True)
- # Grant non-inherited group role
- self.assignment_api.create_grant(group_id=group['id'],
- project_id=root_project['id'],
- role_id=self.role_member['id'])
- # Should get back both projects: because the direct role assignment for
- # the root project and inherited role assignment for leaf project
- user_projects = self.assignment_api.list_projects_for_user(user['id'])
- self.assertEqual(2, len(user_projects))
- self.assertIn(root_project, user_projects)
- self.assertIn(leaf_project, user_projects)
-
- # Disable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=False)
- # Should get back just root project - due the direct role assignment
- user_projects = self.assignment_api.list_projects_for_user(user['id'])
- self.assertEqual(1, len(user_projects))
- self.assertIn(root_project, user_projects)
-
- # TODO(henry-nash): The test above uses list_projects_for_user
- # which may, in a subsequent patch, be re-implemented to call
- # list_role_assignments and then report only the distinct projects.
- #
- # The test plan below therefore mirrors this test, to ensure that
- # list_role_assignments works the same. Once list_projects_for_user
- # has been re-implemented then the manual tests above can be
- # refactored.
- test_plan = {
- # A domain with a project ans sub-project, plus a user.
- # Also, create 2 roles.
- 'entities': {
- 'domains': {'id': CONF.identity.default_domain_id, 'users': 1,
- 'groups': 1,
- 'projects': {'project': 1}},
- 'roles': 2},
- 'group_memberships': [{'group': 0, 'users': [0]}],
- # A direct role and an inherited role on the parent
- 'assignments': [{'group': 0, 'role': 0, 'project': 0},
- {'group': 0, 'role': 1, 'project': 0,
- 'inherited_to_projects': True}],
- 'tests': [
- # List all effective assignments for user[0] - should get back
- # one direct role plus one inherited role.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0,
- 'indirect': {'group': 0}},
- {'user': 0, 'role': 1, 'project': 1,
- 'indirect': {'group': 0, 'project': 0}}]}
- ]
- }
-
- test_plan_with_os_inherit_disabled = {
- 'tests': [
- # List all effective assignments for user[0] - should only get
- # back the one direct role.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0,
- 'indirect': {'group': 0}}]}
- ]
- }
- self.config_fixture.config(group='os_inherit', enabled=True)
- test_data = self.execute_assignment_plan(test_plan)
- self.config_fixture.config(group='os_inherit', enabled=False)
- # Pass the existing test data in to allow execution of 2nd test plan
- self.execute_assignment_cases(
- test_plan_with_os_inherit_disabled, test_data)
-
- def test_list_assignments_for_tree(self):
- """Test we correctly list direct assignments for a tree"""
- # Enable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=True)
-
- test_plan = {
- # Create a domain with a project hierarchy 3 levels deep:
- #
- # project 0
- # ____________|____________
- # | |
- # project 1 project 4
- # ______|_____ ______|_____
- # | | | |
- # project 2 project 3 project 5 project 6
- #
- # Also, create 1 user and 4 roles.
- 'entities': {
- 'domains': {
- 'projects': {'project': [{'project': 2},
- {'project': 2}]},
- 'users': 1},
- 'roles': 4},
- 'assignments': [
- # Direct assignment to projects 1 and 2
- {'user': 0, 'role': 0, 'project': 1},
- {'user': 0, 'role': 1, 'project': 2},
- # Also an inherited assignment on project 1
- {'user': 0, 'role': 2, 'project': 1,
- 'inherited_to_projects': True},
- # ...and two spoiler assignments, one to the root and one
- # to project 4
- {'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 3, 'project': 4}],
- 'tests': [
- # List all assignments for project 1 and its subtree.
- {'params': {'project': 1, 'include_subtree': True},
- 'results': [
- # Only the actual assignments should be returned, no
- # expansion of inherited assignments
- {'user': 0, 'role': 0, 'project': 1},
- {'user': 0, 'role': 1, 'project': 2},
- {'user': 0, 'role': 2, 'project': 1,
- 'inherited_to_projects': 'projects'}]}
- ]
- }
-
- self.execute_assignment_plan(test_plan)
-
- def test_list_effective_assignments_for_tree(self):
- """Test we correctly list effective assignments for a tree"""
- # Enable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=True)
-
- test_plan = {
- # Create a domain with a project hierarchy 3 levels deep:
- #
- # project 0
- # ____________|____________
- # | |
- # project 1 project 4
- # ______|_____ ______|_____
- # | | | |
- # project 2 project 3 project 5 project 6
- #
- # Also, create 1 user and 4 roles.
- 'entities': {
- 'domains': {
- 'projects': {'project': [{'project': 2},
- {'project': 2}]},
- 'users': 1},
- 'roles': 4},
- 'assignments': [
- # An inherited assignment on project 1
- {'user': 0, 'role': 1, 'project': 1,
- 'inherited_to_projects': True},
- # A direct assignment to project 2
- {'user': 0, 'role': 2, 'project': 2},
- # ...and two spoiler assignments, one to the root and one
- # to project 4
- {'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 3, 'project': 4}],
- 'tests': [
- # List all effective assignments for project 1 and its subtree.
- {'params': {'project': 1, 'effective': True,
- 'include_subtree': True},
- 'results': [
- # The inherited assignment on project 1 should appear only
- # on its children
- {'user': 0, 'role': 1, 'project': 2,
- 'indirect': {'project': 1}},
- {'user': 0, 'role': 1, 'project': 3,
- 'indirect': {'project': 1}},
- # And finally the direct assignment on project 2
- {'user': 0, 'role': 2, 'project': 2}]}
- ]
- }
-
- self.execute_assignment_plan(test_plan)
-
- def test_list_effective_assignments_for_tree_with_mixed_assignments(self):
- """Test that we correctly combine assignments for a tree.
-
- In this test we want to ensure that when asking for a list of
- assignments in a subtree, any assignments inherited from above the
- subtree are correctly combined with any assignments within the subtree
- itself.
-
- """
- # Enable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=True)
-
- test_plan = {
- # Create a domain with a project hierarchy 3 levels deep:
- #
- # project 0
- # ____________|____________
- # | |
- # project 1 project 4
- # ______|_____ ______|_____
- # | | | |
- # project 2 project 3 project 5 project 6
- #
- # Also, create 2 users, 1 group and 4 roles.
- 'entities': {
- 'domains': {
- 'projects': {'project': [{'project': 2},
- {'project': 2}]},
- 'users': 2, 'groups': 1},
- 'roles': 4},
- # Both users are part of the same group
- 'group_memberships': [{'group': 0, 'users': [0, 1]}],
- # We are going to ask for listing of assignment on project 1 and
- # it's subtree. So first we'll add two inherited assignments above
- # this (one user and one for a group that contains this user).
- 'assignments': [{'user': 0, 'role': 0, 'project': 0,
- 'inherited_to_projects': True},
- {'group': 0, 'role': 1, 'project': 0,
- 'inherited_to_projects': True},
- # Now an inherited assignment on project 1 itself,
- # which should ONLY show up on its children
- {'user': 0, 'role': 2, 'project': 1,
- 'inherited_to_projects': True},
- # ...and a direct assignment on one of those
- # children
- {'user': 0, 'role': 3, 'project': 2},
- # The rest are spoiler assignments
- {'user': 0, 'role': 2, 'project': 5},
- {'user': 0, 'role': 3, 'project': 4}],
- 'tests': [
- # List all effective assignments for project 1 and its subtree.
- {'params': {'project': 1, 'user': 0, 'effective': True,
- 'include_subtree': True},
- 'results': [
- # First, we should see the inherited user assignment from
- # project 0 on all projects in the subtree
- {'user': 0, 'role': 0, 'project': 1,
- 'indirect': {'project': 0}},
- {'user': 0, 'role': 0, 'project': 2,
- 'indirect': {'project': 0}},
- {'user': 0, 'role': 0, 'project': 3,
- 'indirect': {'project': 0}},
- # Also the inherited group assignment from project 0 on
- # the subtree
- {'user': 0, 'role': 1, 'project': 1,
- 'indirect': {'project': 0, 'group': 0}},
- {'user': 0, 'role': 1, 'project': 2,
- 'indirect': {'project': 0, 'group': 0}},
- {'user': 0, 'role': 1, 'project': 3,
- 'indirect': {'project': 0, 'group': 0}},
- # The inherited assignment on project 1 should appear only
- # on its children
- {'user': 0, 'role': 2, 'project': 2,
- 'indirect': {'project': 1}},
- {'user': 0, 'role': 2, 'project': 3,
- 'indirect': {'project': 1}},
- # And finally the direct assignment on project 2
- {'user': 0, 'role': 3, 'project': 2}]}
- ]
- }
-
- self.execute_assignment_plan(test_plan)
-
- def test_list_effective_assignments_for_tree_with_domain_assignments(self):
- """Test we correctly honor domain inherited assignments on the tree"""
- # Enable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=True)
-
- test_plan = {
- # Create a domain with a project hierarchy 3 levels deep:
- #
- # project 0
- # ____________|____________
- # | |
- # project 1 project 4
- # ______|_____ ______|_____
- # | | | |
- # project 2 project 3 project 5 project 6
- #
- # Also, create 1 user and 4 roles.
- 'entities': {
- 'domains': {
- 'projects': {'project': [{'project': 2},
- {'project': 2}]},
- 'users': 1},
- 'roles': 4},
- 'assignments': [
- # An inherited assignment on the domain (which should be
- # applied to all the projects)
- {'user': 0, 'role': 1, 'domain': 0,
- 'inherited_to_projects': True},
- # A direct assignment to project 2
- {'user': 0, 'role': 2, 'project': 2},
- # ...and two spoiler assignments, one to the root and one
- # to project 4
- {'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 3, 'project': 4}],
- 'tests': [
- # List all effective assignments for project 1 and its subtree.
- {'params': {'project': 1, 'effective': True,
- 'include_subtree': True},
- 'results': [
- # The inherited assignment from the domain should appear
- # only on the part of the subtree we are interested in
- {'user': 0, 'role': 1, 'project': 1,
- 'indirect': {'domain': 0}},
- {'user': 0, 'role': 1, 'project': 2,
- 'indirect': {'domain': 0}},
- {'user': 0, 'role': 1, 'project': 3,
- 'indirect': {'domain': 0}},
- # And finally the direct assignment on project 2
- {'user': 0, 'role': 2, 'project': 2}]}
- ]
- }
-
- self.execute_assignment_plan(test_plan)
-
- def test_list_user_ids_for_project_with_inheritance(self):
- test_plan = {
- # A domain with a project and sub-project, plus four users,
- # two groups, as well as 4 roles.
- 'entities': {
- 'domains': {'id': CONF.identity.default_domain_id, 'users': 4,
- 'groups': 2,
- 'projects': {'project': 1}},
- 'roles': 4},
- # Each group has a unique user member
- 'group_memberships': [{'group': 0, 'users': [1]},
- {'group': 1, 'users': [3]}],
- # Set up assignments so that there should end up with four
- # effective assignments on project 1 - one direct, one due to
- # group membership and one user assignment inherited from the
- # parent and one group assignment inhertied from the parent.
- 'assignments': [{'user': 0, 'role': 0, 'project': 1},
- {'group': 0, 'role': 1, 'project': 1},
- {'user': 2, 'role': 2, 'project': 0,
- 'inherited_to_projects': True},
- {'group': 1, 'role': 3, 'project': 0,
- 'inherited_to_projects': True}],
- }
- # Use assignment plan helper to create all the entities and
- # assignments - then we'll run our own tests using the data
- test_data = self.execute_assignment_plan(test_plan)
- self.config_fixture.config(group='os_inherit', enabled=True)
- user_ids = self.assignment_api.list_user_ids_for_project(
- test_data['projects'][1]['id'])
- self.assertThat(user_ids, matchers.HasLength(4))
- for x in range(0, 4):
- self.assertIn(test_data['users'][x]['id'], user_ids)
-
- def test_list_role_assignment_using_inherited_sourced_groups(self):
- """Test listing inherited assignments when restricted by groups."""
- test_plan = {
- # A domain with 3 users, 3 groups, 3 projects, a second domain,
- # plus 3 roles.
- 'entities': {'domains': [{'users': 3, 'groups': 3, 'projects': 3},
- 1],
- 'roles': 3},
- # Users 0 & 1 are in the group 0, User 0 also in group 1
- 'group_memberships': [{'group': 0, 'users': [0, 1]},
- {'group': 1, 'users': [0]}],
- # Spread the assignments around - we want to be able to show that
- # if sourced by group, assignments from other sources are excluded
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'group': 0, 'role': 1, 'domain': 1},
- {'group': 1, 'role': 2, 'domain': 0,
- 'inherited_to_projects': True},
- {'group': 1, 'role': 2, 'project': 1},
- {'user': 2, 'role': 1, 'project': 1,
- 'inherited_to_projects': True},
- {'group': 2, 'role': 2, 'project': 2}
- ],
- 'tests': [
- # List all effective assignments sourced from groups 0 and 1.
- # We should see the inherited group assigned on the 3 projects
- # from domain 0, as well as the direct assignments.
- {'params': {'source_from_group_ids': [0, 1],
- 'effective': True},
- 'results': [{'group': 0, 'role': 1, 'domain': 1},
- {'group': 1, 'role': 2, 'project': 0,
- 'indirect': {'domain': 0}},
- {'group': 1, 'role': 2, 'project': 1,
- 'indirect': {'domain': 0}},
- {'group': 1, 'role': 2, 'project': 2,
- 'indirect': {'domain': 0}},
- {'group': 1, 'role': 2, 'project': 1}
- ]},
- ]
- }
- self.execute_assignment_plan(test_plan)
-
-
-class ImpliedRoleTests(AssignmentTestHelperMixin):
-
- def test_implied_role_crd(self):
- prior_role_ref = unit.new_role_ref()
- self.role_api.create_role(prior_role_ref['id'], prior_role_ref)
- implied_role_ref = unit.new_role_ref()
- self.role_api.create_role(implied_role_ref['id'], implied_role_ref)
-
- self.role_api.create_implied_role(
- prior_role_ref['id'],
- implied_role_ref['id'])
- implied_role = self.role_api.get_implied_role(
- prior_role_ref['id'],
- implied_role_ref['id'])
- expected_implied_role_ref = {
- 'prior_role_id': prior_role_ref['id'],
- 'implied_role_id': implied_role_ref['id']}
- self.assertDictContainsSubset(
- expected_implied_role_ref,
- implied_role)
-
- self.role_api.delete_implied_role(
- prior_role_ref['id'],
- implied_role_ref['id'])
- self.assertRaises(exception.ImpliedRoleNotFound,
- self.role_api.get_implied_role,
- uuid.uuid4().hex,
- uuid.uuid4().hex)
-
- def test_delete_implied_role_returns_not_found(self):
- self.assertRaises(exception.ImpliedRoleNotFound,
- self.role_api.delete_implied_role,
- uuid.uuid4().hex,
- uuid.uuid4().hex)
-
- def test_role_assignments_simple_tree_of_implied_roles(self):
- """Test that implied roles are expanded out."""
- test_plan = {
- 'entities': {'domains': {'users': 1, 'projects': 1},
- 'roles': 4},
- # Three level tree of implied roles
- 'implied_roles': [{'role': 0, 'implied_roles': 1},
- {'role': 1, 'implied_roles': [2, 3]}],
- 'assignments': [{'user': 0, 'role': 0, 'project': 0}],
- 'tests': [
- # List all direct assignments for user[0], this should just
- # show the one top level role assignment
- {'params': {'user': 0},
- 'results': [{'user': 0, 'role': 0, 'project': 0}]},
- # Listing in effective mode should show the implied roles
- # expanded out
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 1, 'project': 0,
- 'indirect': {'role': 0}},
- {'user': 0, 'role': 2, 'project': 0,
- 'indirect': {'role': 1}},
- {'user': 0, 'role': 3, 'project': 0,
- 'indirect': {'role': 1}}]},
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_circular_inferences(self):
- """Test that implied roles are expanded out."""
- test_plan = {
- 'entities': {'domains': {'users': 1, 'projects': 1},
- 'roles': 4},
- # Three level tree of implied roles
- 'implied_roles': [{'role': 0, 'implied_roles': [1]},
- {'role': 1, 'implied_roles': [2, 3]},
- {'role': 3, 'implied_roles': [0]}],
- 'assignments': [{'user': 0, 'role': 0, 'project': 0}],
- 'tests': [
- # List all direct assignments for user[0], this should just
- # show the one top level role assignment
- {'params': {'user': 0},
- 'results': [{'user': 0, 'role': 0, 'project': 0}]},
- # Listing in effective mode should show the implied roles
- # expanded out
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 0, 'project': 0,
- 'indirect': {'role': 3}},
- {'user': 0, 'role': 1, 'project': 0,
- 'indirect': {'role': 0}},
- {'user': 0, 'role': 2, 'project': 0,
- 'indirect': {'role': 1}},
- {'user': 0, 'role': 3, 'project': 0,
- 'indirect': {'role': 1}}]},
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_role_assignments_directed_graph_of_implied_roles(self):
- """Test that a role can have multiple, different prior roles."""
- test_plan = {
- 'entities': {'domains': {'users': 1, 'projects': 1},
- 'roles': 6},
- # Three level tree of implied roles, where one of the roles at the
- # bottom is implied by more than one top level role
- 'implied_roles': [{'role': 0, 'implied_roles': [1, 2]},
- {'role': 1, 'implied_roles': [3, 4]},
- {'role': 5, 'implied_roles': 4}],
- # The user gets both top level roles
- 'assignments': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 5, 'project': 0}],
- 'tests': [
- # The implied roles should be expanded out and there should be
- # two entries for the role that had two different prior roles.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 5, 'project': 0},
- {'user': 0, 'role': 1, 'project': 0,
- 'indirect': {'role': 0}},
- {'user': 0, 'role': 2, 'project': 0,
- 'indirect': {'role': 0}},
- {'user': 0, 'role': 3, 'project': 0,
- 'indirect': {'role': 1}},
- {'user': 0, 'role': 4, 'project': 0,
- 'indirect': {'role': 1}},
- {'user': 0, 'role': 4, 'project': 0,
- 'indirect': {'role': 5}}]},
- ]
- }
- test_data = self.execute_assignment_plan(test_plan)
-
- # We should also be able to get a similar (yet summarized) answer to
- # the above by calling get_roles_for_user_and_project(), which should
- # list the role_ids, yet remove any duplicates
- role_ids = self.assignment_api.get_roles_for_user_and_project(
- test_data['users'][0]['id'], test_data['projects'][0]['id'])
- # We should see 6 entries, not 7, since role index 5 appeared twice in
- # the answer from list_role_assignments
- self.assertThat(role_ids, matchers.HasLength(6))
- for x in range(0, 5):
- self.assertIn(test_data['roles'][x]['id'], role_ids)
-
- def test_role_assignments_implied_roles_filtered_by_role(self):
- """Test that you can filter by role even if roles are implied."""
- test_plan = {
- 'entities': {'domains': {'users': 1, 'projects': 2},
- 'roles': 4},
- # Three level tree of implied roles
- 'implied_roles': [{'role': 0, 'implied_roles': 1},
- {'role': 1, 'implied_roles': [2, 3]}],
- 'assignments': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 3, 'project': 1}],
- 'tests': [
- # List effective roles filtering by one of the implied roles,
- # showing that the filter was implied post expansion of
- # implied roles (and that non impled roles are included in
- # the filter
- {'params': {'role': 3, 'effective': True},
- 'results': [{'user': 0, 'role': 3, 'project': 0,
- 'indirect': {'role': 1}},
- {'user': 0, 'role': 3, 'project': 1}]},
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_role_assignments_simple_tree_of_implied_roles_on_domain(self):
- """Test that implied roles are expanded out when placed on a domain."""
- test_plan = {
- 'entities': {'domains': {'users': 1},
- 'roles': 4},
- # Three level tree of implied roles
- 'implied_roles': [{'role': 0, 'implied_roles': 1},
- {'role': 1, 'implied_roles': [2, 3]}],
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0}],
- 'tests': [
- # List all direct assignments for user[0], this should just
- # show the one top level role assignment
- {'params': {'user': 0},
- 'results': [{'user': 0, 'role': 0, 'domain': 0}]},
- # Listing in effective mode should how the implied roles
- # expanded out
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'domain': 0,
- 'indirect': {'role': 0}},
- {'user': 0, 'role': 2, 'domain': 0,
- 'indirect': {'role': 1}},
- {'user': 0, 'role': 3, 'domain': 0,
- 'indirect': {'role': 1}}]},
- ]
- }
- self.execute_assignment_plan(test_plan)
-
- def test_role_assignments_inherited_implied_roles(self):
- """Test that you can intermix inherited and implied roles."""
- test_plan = {
- 'entities': {'domains': {'users': 1, 'projects': 1},
- 'roles': 4},
- # Simply one level of implied roles
- 'implied_roles': [{'role': 0, 'implied_roles': 1}],
- # Assign to top level role as an inherited assignment to the
- # domain
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0,
- 'inherited_to_projects': True}],
- 'tests': [
- # List all direct assignments for user[0], this should just
- # show the one top level role assignment
- {'params': {'user': 0},
- 'results': [{'user': 0, 'role': 0, 'domain': 0,
- 'inherited_to_projects': 'projects'}]},
- # List in effective mode - we should only see the initial and
- # implied role on the project (since inherited roles are not
- # active on their anchor point).
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0,
- 'indirect': {'domain': 0}},
- {'user': 0, 'role': 1, 'project': 0,
- 'indirect': {'domain': 0, 'role': 0}}]},
- ]
- }
- self.config_fixture.config(group='os_inherit', enabled=True)
- self.execute_assignment_plan(test_plan)
-
- def test_role_assignments_domain_specific_with_implied_roles(self):
- test_plan = {
- 'entities': {'domains': {'users': 1, 'projects': 1, 'roles': 2},
- 'roles': 2},
- # Two level tree of implied roles, with the top and 1st level being
- # domain specific roles, and the bottom level being infered global
- # roles.
- 'implied_roles': [{'role': 0, 'implied_roles': [1]},
- {'role': 1, 'implied_roles': [2, 3]}],
- 'assignments': [{'user': 0, 'role': 0, 'project': 0}],
- 'tests': [
- # List all direct assignments for user[0], this should just
- # show the one top level role assignment, even though this is a
- # domain specific role (since we are in non-effective mode and
- # we show any direct role assignment in that mode).
- {'params': {'user': 0},
- 'results': [{'user': 0, 'role': 0, 'project': 0}]},
- # Now the effective ones - so the implied roles should be
- # expanded out, as well as any domain specific roles should be
- # removed.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 2, 'project': 0,
- 'indirect': {'role': 1}},
- {'user': 0, 'role': 3, 'project': 0,
- 'indirect': {'role': 1}}]},
- ]
- }
- self.execute_assignment_plan(test_plan)
diff --git a/keystone-moon/keystone/tests/unit/assignment/test_core.py b/keystone-moon/keystone/tests/unit/assignment/test_core.py
deleted file mode 100644
index 494e19c3..00000000
--- a/keystone-moon/keystone/tests/unit/assignment/test_core.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-
-
-class RoleTests(object):
-
- def test_get_role_returns_not_found(self):
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- uuid.uuid4().hex)
-
- def test_create_duplicate_role_name_fails(self):
- role = unit.new_role_ref(id='fake1', name='fake1name')
- self.role_api.create_role('fake1', role)
- role['id'] = 'fake2'
- self.assertRaises(exception.Conflict,
- self.role_api.create_role,
- 'fake2',
- role)
-
- def test_rename_duplicate_role_name_fails(self):
- role1 = unit.new_role_ref(id='fake1', name='fake1name')
- role2 = unit.new_role_ref(id='fake2', name='fake2name')
- self.role_api.create_role('fake1', role1)
- self.role_api.create_role('fake2', role2)
- role1['name'] = 'fake2name'
- self.assertRaises(exception.Conflict,
- self.role_api.update_role,
- 'fake1',
- role1)
-
- def test_role_crud(self):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_ref = self.role_api.get_role(role['id'])
- role_ref_dict = {x: role_ref[x] for x in role_ref}
- self.assertDictEqual(role, role_ref_dict)
-
- role['name'] = uuid.uuid4().hex
- updated_role_ref = self.role_api.update_role(role['id'], role)
- role_ref = self.role_api.get_role(role['id'])
- role_ref_dict = {x: role_ref[x] for x in role_ref}
- self.assertDictEqual(role, role_ref_dict)
- self.assertDictEqual(role_ref_dict, updated_role_ref)
-
- self.role_api.delete_role(role['id'])
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- role['id'])
-
- def test_update_role_returns_not_found(self):
- role = unit.new_role_ref()
- self.assertRaises(exception.RoleNotFound,
- self.role_api.update_role,
- role['id'],
- role)
-
- def test_list_roles(self):
- roles = self.role_api.list_roles()
- self.assertEqual(len(default_fixtures.ROLES), len(roles))
- role_ids = set(role['id'] for role in roles)
- expected_role_ids = set(role['id'] for role in default_fixtures.ROLES)
- self.assertEqual(expected_role_ids, role_ids)
-
- @unit.skip_if_cache_disabled('role')
- def test_cache_layer_role_crud(self):
- role = unit.new_role_ref()
- role_id = role['id']
- # Create role
- self.role_api.create_role(role_id, role)
- role_ref = self.role_api.get_role(role_id)
- updated_role_ref = copy.deepcopy(role_ref)
- updated_role_ref['name'] = uuid.uuid4().hex
- # Update role, bypassing the role api manager
- self.role_api.driver.update_role(role_id, updated_role_ref)
- # Verify get_role still returns old ref
- self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
- # Invalidate Cache
- self.role_api.get_role.invalidate(self.role_api, role_id)
- # Verify get_role returns the new role_ref
- self.assertDictEqual(updated_role_ref,
- self.role_api.get_role(role_id))
- # Update role back to original via the assignment api manager
- self.role_api.update_role(role_id, role_ref)
- # Verify get_role returns the original role ref
- self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
- # Delete role bypassing the role api manager
- self.role_api.driver.delete_role(role_id)
- # Verify get_role still returns the role_ref
- self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
- # Invalidate cache
- self.role_api.get_role.invalidate(self.role_api, role_id)
- # Verify RoleNotFound is now raised
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- role_id)
- # recreate role
- self.role_api.create_role(role_id, role)
- self.role_api.get_role(role_id)
- # delete role via the assignment api manager
- self.role_api.delete_role(role_id)
- # verify RoleNotFound is now raised
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- role_id)
diff --git a/keystone-moon/keystone/tests/unit/auth/__init__.py b/keystone-moon/keystone/tests/unit/auth/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/auth/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/auth/test_controllers.py b/keystone-moon/keystone/tests/unit/auth/test_controllers.py
deleted file mode 100644
index 76f2776a..00000000
--- a/keystone-moon/keystone/tests/unit/auth/test_controllers.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright 2015 IBM Corp.
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-import mock
-from oslo_config import cfg
-from oslo_config import fixture as config_fixture
-from oslo_utils import importutils
-from oslotest import mockpatch
-import stevedore
-from stevedore import extension
-
-from keystone.auth import controllers
-from keystone.tests import unit
-
-
-class TestLoadAuthMethod(unit.BaseTestCase):
- def test_entrypoint_works(self):
- method = uuid.uuid4().hex
- plugin_name = self.getUniqueString()
-
- # Register the method using the given plugin
- cf = self.useFixture(config_fixture.Config())
- cf.register_opt(cfg.StrOpt(method), group='auth')
- cf.config(group='auth', **{method: plugin_name})
-
- # Setup stevedore.DriverManager to return a driver for the plugin
- extension_ = extension.Extension(
- plugin_name, entry_point=mock.sentinel.entry_point,
- plugin=mock.sentinel.plugin,
- obj=mock.sentinel.driver)
- auth_plugin_namespace = 'keystone.auth.%s' % method
- fake_driver_manager = stevedore.DriverManager.make_test_instance(
- extension_, namespace=auth_plugin_namespace)
-
- driver_manager_mock = self.useFixture(mockpatch.PatchObject(
- stevedore, 'DriverManager', return_value=fake_driver_manager)).mock
-
- driver = controllers.load_auth_method(method)
-
- self.assertEqual(auth_plugin_namespace, fake_driver_manager.namespace)
- driver_manager_mock.assert_called_once_with(
- auth_plugin_namespace, plugin_name, invoke_on_load=True)
- self.assertIs(driver, mock.sentinel.driver)
-
- def test_entrypoint_fails_import_works(self):
- method = uuid.uuid4().hex
- plugin_name = self.getUniqueString()
-
- # Register the method using the given plugin
- cf = self.useFixture(config_fixture.Config())
- cf.register_opt(cfg.StrOpt(method), group='auth')
- cf.config(group='auth', **{method: plugin_name})
-
- # stevedore.DriverManager raises RuntimeError if it can't load the
- # driver.
- self.useFixture(mockpatch.PatchObject(
- stevedore, 'DriverManager', side_effect=RuntimeError))
-
- self.useFixture(mockpatch.PatchObject(
- importutils, 'import_object', return_value=mock.sentinel.driver))
-
- driver = controllers.load_auth_method(method)
- self.assertIs(driver, mock.sentinel.driver)
-
- def test_entrypoint_fails_import_fails(self):
- method = uuid.uuid4().hex
- plugin_name = self.getUniqueString()
-
- # Register the method using the given plugin
- cf = self.useFixture(config_fixture.Config())
- cf.register_opt(cfg.StrOpt(method), group='auth')
- cf.config(group='auth', **{method: plugin_name})
-
- # stevedore.DriverManager raises RuntimeError if it can't load the
- # driver.
- self.useFixture(mockpatch.PatchObject(
- stevedore, 'DriverManager', side_effect=RuntimeError))
-
- class TestException(Exception):
- pass
-
- self.useFixture(mockpatch.PatchObject(
- importutils, 'import_object', side_effect=TestException))
-
- self.assertRaises(TestException, controllers.load_auth_method, method)
diff --git a/keystone-moon/keystone/tests/unit/backend/__init__.py b/keystone-moon/keystone/tests/unit/backend/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/core_ldap.py b/keystone-moon/keystone/tests/unit/backend/core_ldap.py
deleted file mode 100644
index 8b72c62a..00000000
--- a/keystone-moon/keystone/tests/unit/backend/core_ldap.py
+++ /dev/null
@@ -1,146 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import ldap
-
-from oslo_config import cfg
-
-from keystone.common import cache
-from keystone.common import ldap as common_ldap
-from keystone.common.ldap import core as common_ldap_core
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit import fakeldap
-from keystone.tests.unit.ksfixtures import database
-
-
-CONF = cfg.CONF
-
-
-def create_group_container(identity_api):
- # Create the groups base entry (ou=Groups,cn=example,cn=com)
- group_api = identity_api.driver.group
- conn = group_api.get_connection()
- dn = 'ou=Groups,cn=example,cn=com'
- conn.add_s(dn, [('objectclass', ['organizationalUnit']),
- ('ou', ['Groups'])])
-
-
-class BaseBackendLdapCommon(object):
- """Mixin class to set up generic LDAP backends."""
-
- def setUp(self):
- super(BaseBackendLdapCommon, self).setUp()
-
- common_ldap.register_handler('fake://', fakeldap.FakeLdap)
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- self.addCleanup(common_ldap_core._HANDLERS.clear)
- self.addCleanup(self.clear_database)
-
- def _get_domain_fixture(self):
- """Domains in LDAP are read-only, so just return the static one."""
- return self.resource_api.get_domain(CONF.identity.default_domain_id)
-
- def clear_database(self):
- for shelf in fakeldap.FakeShelves:
- fakeldap.FakeShelves[shelf].clear()
-
- def get_config(self, domain_id):
- # Only one conf structure unless we are using separate domain backends
- return CONF
-
- def config_overrides(self):
- super(BaseBackendLdapCommon, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
-
- def config_files(self):
- config_files = super(BaseBackendLdapCommon, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
- return config_files
-
- def get_user_enabled_vals(self, user):
- user_dn = (
- self.identity_api.driver.user._id_to_dn_string(user['id']))
- enabled_attr_name = CONF.ldap.user_enabled_attribute
-
- ldap_ = self.identity_api.driver.user.get_connection()
- res = ldap_.search_s(user_dn,
- ldap.SCOPE_BASE,
- u'(sn=%s)' % user['name'])
- if enabled_attr_name in res[0][1]:
- return res[0][1][enabled_attr_name]
- else:
- return None
-
-
-class BaseBackendLdap(object):
- """Mixin class to set up an all-LDAP configuration."""
-
- def setUp(self):
- # NOTE(dstanek): The database must be setup prior to calling the
- # parent's setUp. The parent's setUp uses services (like
- # credentials) that require a database.
- self.useFixture(database.Database())
- super(BaseBackendLdap, self).setUp()
-
- def load_fixtures(self, fixtures):
- # Override super impl since need to create group container.
- create_group_container(self.identity_api)
- super(BaseBackendLdap, self).load_fixtures(fixtures)
-
-
-class BaseBackendLdapIdentitySqlEverythingElse(unit.SQLDriverOverrides):
- """Mixin base for Identity LDAP, everything else SQL backend tests."""
-
- def config_files(self):
- config_files = super(BaseBackendLdapIdentitySqlEverythingElse,
- self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap_sql.conf'))
- return config_files
-
- def setUp(self):
- sqldb = self.useFixture(database.Database())
- super(BaseBackendLdapIdentitySqlEverythingElse, self).setUp()
- self.clear_database()
- self.load_backends()
- cache.configure_cache()
-
- sqldb.recreate()
- self.load_fixtures(default_fixtures)
- # defaulted by the data load
- self.user_foo['enabled'] = True
-
- def config_overrides(self):
- super(BaseBackendLdapIdentitySqlEverythingElse,
- self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
- self.config_fixture.config(group='resource', driver='sql')
- self.config_fixture.config(group='assignment', driver='sql')
-
-
-class BaseBackendLdapIdentitySqlEverythingElseWithMapping(object):
- """Mixin base class to test mapping of default LDAP backend.
-
- The default configuration is not to enable mapping when using a single
- backend LDAP driver. However, a cloud provider might want to enable
- the mapping, hence hiding the LDAP IDs from any clients of keystone.
- Setting backward_compatible_ids to False will enable this mapping.
-
- """
-
- def config_overrides(self):
- super(BaseBackendLdapIdentitySqlEverythingElseWithMapping,
- self).config_overrides()
- self.config_fixture.config(group='identity_mapping',
- backward_compatible_ids=False)
diff --git a/keystone-moon/keystone/tests/unit/backend/core_sql.py b/keystone-moon/keystone/tests/unit/backend/core_sql.py
deleted file mode 100644
index 8c9f4957..00000000
--- a/keystone-moon/keystone/tests/unit/backend/core_sql.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import sqlalchemy
-
-from keystone.common import sql
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.ksfixtures import database
-
-
-class BaseBackendSqlTests(unit.SQLDriverOverrides, unit.TestCase):
-
- def setUp(self):
- super(BaseBackendSqlTests, self).setUp()
- self.useFixture(database.Database())
- self.load_backends()
-
- # populate the engine with tables & fixtures
- self.load_fixtures(default_fixtures)
- # defaulted by the data load
- self.user_foo['enabled'] = True
-
- def config_files(self):
- config_files = super(BaseBackendSqlTests, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
-
-class BaseBackendSqlModels(BaseBackendSqlTests):
-
- def select_table(self, name):
- table = sqlalchemy.Table(name,
- sql.ModelBase.metadata,
- autoload=True)
- s = sqlalchemy.select([table])
- return s
-
- def assertExpectedSchema(self, table, cols):
- table = self.select_table(table)
- for col, type_, length in cols:
- self.assertIsInstance(table.c[col].type, type_)
- if length:
- self.assertEqual(length, table.c[col].type.length)
diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py b/keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/core.py b/keystone-moon/keystone/tests/unit/backend/domain_config/core.py
deleted file mode 100644
index 7bbbf313..00000000
--- a/keystone-moon/keystone/tests/unit/backend/domain_config/core.py
+++ /dev/null
@@ -1,601 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-import mock
-from testtools import matchers
-
-from keystone import exception
-from keystone.tests import unit
-
-
-class DomainConfigTests(object):
-
- def setUp(self):
- self.domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(self.domain['id'], self.domain)
- self.addCleanup(self.clean_up_domain)
-
- def clean_up_domain(self):
- # NOTE(henry-nash): Deleting the domain will also delete any domain
- # configs for this domain.
- self.domain['enabled'] = False
- self.resource_api.update_domain(self.domain['id'], self.domain)
- self.resource_api.delete_domain(self.domain['id'])
- del self.domain
-
- def _domain_config_crud(self, sensitive):
- group = uuid.uuid4().hex
- option = uuid.uuid4().hex
- value = uuid.uuid4().hex
- self.domain_config_api.create_config_option(
- self.domain['id'], group, option, value, sensitive)
- res = self.domain_config_api.get_config_option(
- self.domain['id'], group, option, sensitive)
- config = {'group': group, 'option': option, 'value': value}
- self.assertEqual(config, res)
-
- value = uuid.uuid4().hex
- self.domain_config_api.update_config_option(
- self.domain['id'], group, option, value, sensitive)
- res = self.domain_config_api.get_config_option(
- self.domain['id'], group, option, sensitive)
- config = {'group': group, 'option': option, 'value': value}
- self.assertEqual(config, res)
-
- self.domain_config_api.delete_config_options(
- self.domain['id'], group, option, sensitive)
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.get_config_option,
- self.domain['id'], group, option, sensitive)
- # ...and silent if we try to delete it again
- self.domain_config_api.delete_config_options(
- self.domain['id'], group, option, sensitive)
-
- def test_whitelisted_domain_config_crud(self):
- self._domain_config_crud(sensitive=False)
-
- def test_sensitive_domain_config_crud(self):
- self._domain_config_crud(sensitive=True)
-
- def _list_domain_config(self, sensitive):
- """Test listing by combination of domain, group & option."""
-
- config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- # Put config2 in the same group as config1
- config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- config3 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': 100}
- for config in [config1, config2, config3]:
- self.domain_config_api.create_config_option(
- self.domain['id'], config['group'], config['option'],
- config['value'], sensitive)
-
- # Try listing all items from a domain
- res = self.domain_config_api.list_config_options(
- self.domain['id'], sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(3))
- for res_entry in res:
- self.assertIn(res_entry, [config1, config2, config3])
-
- # Try listing by domain and group
- res = self.domain_config_api.list_config_options(
- self.domain['id'], group=config1['group'], sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(2))
- for res_entry in res:
- self.assertIn(res_entry, [config1, config2])
-
- # Try listing by domain, group and option
- res = self.domain_config_api.list_config_options(
- self.domain['id'], group=config2['group'],
- option=config2['option'], sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(1))
- self.assertEqual(config2, res[0])
-
- def test_list_whitelisted_domain_config_crud(self):
- self._list_domain_config(False)
-
- def test_list_sensitive_domain_config_crud(self):
- self._list_domain_config(True)
-
- def _delete_domain_configs(self, sensitive):
- """Test deleting by combination of domain, group & option."""
-
- config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- # Put config2 and config3 in the same group as config1
- config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- config3 = {'group': config1['group'], 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- config4 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- for config in [config1, config2, config3, config4]:
- self.domain_config_api.create_config_option(
- self.domain['id'], config['group'], config['option'],
- config['value'], sensitive)
-
- # Try deleting by domain, group and option
- res = self.domain_config_api.delete_config_options(
- self.domain['id'], group=config2['group'],
- option=config2['option'], sensitive=sensitive)
- res = self.domain_config_api.list_config_options(
- self.domain['id'], sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(3))
- for res_entry in res:
- self.assertIn(res_entry, [config1, config3, config4])
-
- # Try deleting by domain and group
- res = self.domain_config_api.delete_config_options(
- self.domain['id'], group=config4['group'], sensitive=sensitive)
- res = self.domain_config_api.list_config_options(
- self.domain['id'], sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(2))
- for res_entry in res:
- self.assertIn(res_entry, [config1, config3])
-
- # Try deleting all items from a domain
- res = self.domain_config_api.delete_config_options(
- self.domain['id'], sensitive=sensitive)
- res = self.domain_config_api.list_config_options(
- self.domain['id'], sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(0))
-
- def test_delete_whitelisted_domain_configs(self):
- self._delete_domain_configs(False)
-
- def test_delete_sensitive_domain_configs(self):
- self._delete_domain_configs(True)
-
- def _create_domain_config_twice(self, sensitive):
- """Test conflict error thrown if create the same option twice."""
-
- config = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
-
- self.domain_config_api.create_config_option(
- self.domain['id'], config['group'], config['option'],
- config['value'], sensitive=sensitive)
- self.assertRaises(exception.Conflict,
- self.domain_config_api.create_config_option,
- self.domain['id'], config['group'], config['option'],
- config['value'], sensitive=sensitive)
-
- def test_create_whitelisted_domain_config_twice(self):
- self._create_domain_config_twice(False)
-
- def test_create_sensitive_domain_config_twice(self):
- self._create_domain_config_twice(True)
-
- def test_delete_domain_deletes_configs(self):
- """Test domain deletion clears the domain configs."""
-
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain['id'], domain)
- config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- # Put config2 in the same group as config1
- config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- self.domain_config_api.create_config_option(
- domain['id'], config1['group'], config1['option'],
- config1['value'])
- self.domain_config_api.create_config_option(
- domain['id'], config2['group'], config2['option'],
- config2['value'], sensitive=True)
- res = self.domain_config_api.list_config_options(
- domain['id'])
- self.assertThat(res, matchers.HasLength(1))
- res = self.domain_config_api.list_config_options(
- domain['id'], sensitive=True)
- self.assertThat(res, matchers.HasLength(1))
-
- # Now delete the domain
- domain['enabled'] = False
- self.resource_api.update_domain(domain['id'], domain)
- self.resource_api.delete_domain(domain['id'])
-
- # Check domain configs have also been deleted
- res = self.domain_config_api.list_config_options(
- domain['id'])
- self.assertThat(res, matchers.HasLength(0))
- res = self.domain_config_api.list_config_options(
- domain['id'], sensitive=True)
- self.assertThat(res, matchers.HasLength(0))
-
- def test_create_domain_config_including_sensitive_option(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- # password is sensitive, so check that the whitelisted portion and
- # the sensitive piece have been stored in the appropriate locations.
- res = self.domain_config_api.get_config(self.domain['id'])
- config_whitelisted = copy.deepcopy(config)
- config_whitelisted['ldap'].pop('password')
- self.assertEqual(config_whitelisted, res)
- res = self.domain_config_api.get_config_option(
- self.domain['id'], 'ldap', 'password', sensitive=True)
- self.assertEqual(config['ldap']['password'], res['value'])
-
- # Finally, use the non-public API to get back the whole config
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(config, res)
-
- def test_get_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- res = self.domain_config_api.get_config(self.domain['id'],
- group='identity')
- config_partial = copy.deepcopy(config)
- config_partial.pop('ldap')
- self.assertEqual(config_partial, res)
- res = self.domain_config_api.get_config(
- self.domain['id'], group='ldap', option='user_tree_dn')
- self.assertEqual({'user_tree_dn': config['ldap']['user_tree_dn']}, res)
- # ...but we should fail to get a sensitive option
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.get_config, self.domain['id'],
- group='ldap', option='password')
-
- def test_delete_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- self.domain_config_api.delete_config(
- self.domain['id'], group='identity')
- config_partial = copy.deepcopy(config)
- config_partial.pop('identity')
- config_partial['ldap'].pop('password')
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(config_partial, res)
-
- self.domain_config_api.delete_config(
- self.domain['id'], group='ldap', option='url')
- config_partial = copy.deepcopy(config_partial)
- config_partial['ldap'].pop('url')
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(config_partial, res)
-
- def test_get_options_not_in_domain_config(self):
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.get_config, self.domain['id'])
- config = {'ldap': {'url': uuid.uuid4().hex}}
-
- self.domain_config_api.create_config(self.domain['id'], config)
-
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.get_config, self.domain['id'],
- group='identity')
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.get_config, self.domain['id'],
- group='ldap', option='user_tree_dn')
-
- def test_get_sensitive_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual({}, res)
- self.domain_config_api.create_config(self.domain['id'], config)
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(config, res)
-
- def test_update_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- # Try updating a group
- new_config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_filter': uuid.uuid4().hex}}
- res = self.domain_config_api.update_config(
- self.domain['id'], new_config, group='ldap')
- expected_config = copy.deepcopy(config)
- expected_config['ldap']['url'] = new_config['ldap']['url']
- expected_config['ldap']['user_filter'] = (
- new_config['ldap']['user_filter'])
- expected_full_config = copy.deepcopy(expected_config)
- expected_config['ldap'].pop('password')
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(expected_config, res)
- # The sensitive option should still exist
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(expected_full_config, res)
-
- # Try updating a single whitelisted option
- self.domain_config_api.delete_config(self.domain['id'])
- self.domain_config_api.create_config(self.domain['id'], config)
- new_config = {'url': uuid.uuid4().hex}
- res = self.domain_config_api.update_config(
- self.domain['id'], new_config, group='ldap', option='url')
-
- # Make sure whitelisted and full config is updated
- expected_whitelisted_config = copy.deepcopy(config)
- expected_whitelisted_config['ldap']['url'] = new_config['url']
- expected_full_config = copy.deepcopy(expected_whitelisted_config)
- expected_whitelisted_config['ldap'].pop('password')
- self.assertEqual(expected_whitelisted_config, res)
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(expected_whitelisted_config, res)
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(expected_full_config, res)
-
- # Try updating a single sensitive option
- self.domain_config_api.delete_config(self.domain['id'])
- self.domain_config_api.create_config(self.domain['id'], config)
- new_config = {'password': uuid.uuid4().hex}
- res = self.domain_config_api.update_config(
- self.domain['id'], new_config, group='ldap', option='password')
- # The whitelisted config should not have changed...
- expected_whitelisted_config = copy.deepcopy(config)
- expected_full_config = copy.deepcopy(config)
- expected_whitelisted_config['ldap'].pop('password')
- self.assertEqual(expected_whitelisted_config, res)
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(expected_whitelisted_config, res)
- expected_full_config['ldap']['password'] = new_config['password']
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- # ...but the sensitive piece should have.
- self.assertEqual(expected_full_config, res)
-
- def test_update_invalid_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- # An extra group, when specifying one group should fail
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config, group='ldap')
- # An extra option, when specifying one option should fail
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config['ldap'],
- group='ldap', option='url')
-
- # Now try the right number of groups/options, but just not
- # ones that are in the config provided
- config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config, group='identity')
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config['ldap'], group='ldap',
- option='url')
-
- # Now some valid groups/options, but just not ones that are in the
- # existing config
- config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- config_wrong_group = {'identity': {'driver': uuid.uuid4().hex}}
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.update_config,
- self.domain['id'], config_wrong_group,
- group='identity')
- config_wrong_option = {'url': uuid.uuid4().hex}
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.update_config,
- self.domain['id'], config_wrong_option,
- group='ldap', option='url')
-
- # And finally just some bad groups/options
- bad_group = uuid.uuid4().hex
- config = {bad_group: {'user': uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config, group=bad_group,
- option='user')
- bad_option = uuid.uuid4().hex
- config = {'ldap': {bad_option: uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config, group='ldap',
- option=bad_option)
-
- def test_create_invalid_domain_config(self):
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], {})
- config = {uuid.uuid4().hex: uuid.uuid4().hex}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], config)
- config = {uuid.uuid4().hex: {uuid.uuid4().hex: uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], config)
- config = {'ldap': {uuid.uuid4().hex: uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], config)
- # Try an option that IS in the standard conf, but neither whitelisted
- # nor marked as sensitive
- config = {'ldap': {'role_tree_dn': uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], config)
-
- def test_delete_invalid_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- # Try deleting a group not in the config
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.delete_config,
- self.domain['id'], group='identity')
- # Try deleting an option not in the config
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.delete_config,
- self.domain['id'],
- group='ldap', option='user_tree_dn')
-
- def test_sensitive_substitution_in_domain_config(self):
- # Create a config that contains a whitelisted option that requires
- # substitution of a sensitive option.
- config = {'ldap': {'url': 'my_url/%(password)s',
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- # Read back the config with the internal method and ensure that the
- # substitution has taken place.
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- expected_url = (
- config['ldap']['url'] % {'password': config['ldap']['password']})
- self.assertEqual(expected_url, res['ldap']['url'])
-
- def test_invalid_sensitive_substitution_in_domain_config(self):
- """Check that invalid substitutions raise warnings."""
-
- mock_log = mock.Mock()
-
- invalid_option_config = {
- 'ldap': {'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
-
- for invalid_option in ['my_url/%(passssword)s',
- 'my_url/%(password',
- 'my_url/%(password)',
- 'my_url/%(password)d']:
- invalid_option_config['ldap']['url'] = invalid_option
- self.domain_config_api.create_config(
- self.domain['id'], invalid_option_config)
-
- with mock.patch('keystone.resource.core.LOG', mock_log):
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- mock_log.warn.assert_any_call(mock.ANY)
- self.assertEqual(
- invalid_option_config['ldap']['url'], res['ldap']['url'])
-
- def test_escaped_sequence_in_domain_config(self):
- """Check that escaped '%(' doesn't get interpreted."""
-
- mock_log = mock.Mock()
-
- escaped_option_config = {
- 'ldap': {'url': 'my_url/%%(password)s',
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
-
- self.domain_config_api.create_config(
- self.domain['id'], escaped_option_config)
-
- with mock.patch('keystone.resource.core.LOG', mock_log):
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertFalse(mock_log.warn.called)
- # The escaping '%' should have been removed
- self.assertEqual('my_url/%(password)s', res['ldap']['url'])
-
- @unit.skip_if_cache_disabled('domain_config')
- def test_cache_layer_get_sensitive_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- # cache the result
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(config, res)
-
- # delete, bypassing domain config manager api
- self.domain_config_api.delete_config_options(self.domain['id'])
- self.domain_config_api.delete_config_options(self.domain['id'],
- sensitive=True)
-
- self.assertDictEqual(
- res, self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id']))
- self.domain_config_api.get_config_with_sensitive_info.invalidate(
- self.domain_config_api, self.domain['id'])
- self.assertDictEqual(
- {},
- self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id']))
-
- def test_config_registration(self):
- type = uuid.uuid4().hex
- self.domain_config_api.obtain_registration(
- self.domain['id'], type)
- self.domain_config_api.release_registration(
- self.domain['id'], type=type)
-
- # Make sure that once someone has it, nobody else can get it.
- # This includes the domain who already has it.
- self.domain_config_api.obtain_registration(
- self.domain['id'], type)
- self.assertFalse(
- self.domain_config_api.obtain_registration(
- self.domain['id'], type))
-
- # Make sure we can read who does have it
- self.assertEqual(
- self.domain['id'],
- self.domain_config_api.read_registration(type))
-
- # Make sure releasing it is silent if the domain specified doesn't
- # have the registration
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- self.domain_config_api.release_registration(
- domain2['id'], type=type)
-
- # If nobody has the type registered, then trying to read it should
- # raise ConfigRegistrationNotFound
- self.domain_config_api.release_registration(
- self.domain['id'], type=type)
- self.assertRaises(exception.ConfigRegistrationNotFound,
- self.domain_config_api.read_registration,
- type)
-
- # Finally check multiple registrations are cleared if you free the
- # registration without specifying the type
- type2 = uuid.uuid4().hex
- self.domain_config_api.obtain_registration(
- self.domain['id'], type)
- self.domain_config_api.obtain_registration(
- self.domain['id'], type2)
- self.domain_config_api.release_registration(self.domain['id'])
- self.assertRaises(exception.ConfigRegistrationNotFound,
- self.domain_config_api.read_registration,
- type)
- self.assertRaises(exception.ConfigRegistrationNotFound,
- self.domain_config_api.read_registration,
- type2)
diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py b/keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py
deleted file mode 100644
index 6459ede1..00000000
--- a/keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from keystone.common import sql
-from keystone.tests.unit.backend import core_sql
-from keystone.tests.unit.backend.domain_config import core
-
-
-class SqlDomainConfigModels(core_sql.BaseBackendSqlModels):
-
- def test_whitelisted_model(self):
- cols = (('domain_id', sql.String, 64),
- ('group', sql.String, 255),
- ('option', sql.String, 255),
- ('value', sql.JsonBlob, None))
- self.assertExpectedSchema('whitelisted_config', cols)
-
- def test_sensitive_model(self):
- cols = (('domain_id', sql.String, 64),
- ('group', sql.String, 255),
- ('option', sql.String, 255),
- ('value', sql.JsonBlob, None))
- self.assertExpectedSchema('sensitive_config', cols)
-
-
-class SqlDomainConfig(core_sql.BaseBackendSqlTests, core.DomainConfigTests):
- def setUp(self):
- super(SqlDomainConfig, self).setUp()
- # core.DomainConfigTests is effectively a mixin class, so make sure we
- # call its setup
- core.DomainConfigTests.setUp(self)
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py
deleted file mode 100644
index da1490a7..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.tests.unit import test_backend_sql
-
-
-class SqlIdentityV8(test_backend_sql.SqlIdentity):
- """Test that a V8 driver still passes the same tests.
-
- We use the SQL driver as an example of a V8 legacy driver.
-
- """
-
- def config_overrides(self):
- super(SqlIdentityV8, self).config_overrides()
- # V8 SQL specific driver overrides
- self.config_fixture.config(
- group='assignment',
- driver='keystone.assignment.V8_backends.sql.Assignment')
- self.use_specific_sql_driver_version(
- 'keystone.assignment', 'backends', 'V8_')
-
- def test_delete_project_assignments_same_id_as_domain(self):
- self.skipTest("V8 doesn't support project acting as a domain.")
-
- def test_delete_user_assignments_user_same_id_as_group(self):
- self.skipTest("Groups and users with the same ID are not supported.")
-
- def test_delete_group_assignments_group_same_id_as_user(self):
- self.skipTest("Groups and users with the same ID are not supported.")
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py
deleted file mode 100644
index d5469768..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from six.moves import http_client
-
-from keystone.tests.unit import test_v3_federation
-
-
-class FederatedSetupMixinV8(object):
- def useV8driver(self):
- # We use the SQL driver as an example V8 driver, so override
- # the current driver with that version.
- self.config_fixture.config(
- group='federation',
- driver='keystone.federation.V8_backends.sql.Federation')
- self.use_specific_sql_driver_version(
- 'keystone.federation', 'backends', 'V8_')
-
-
-class FederatedIdentityProviderTestsV8(
- test_v3_federation.FederatedIdentityProviderTests,
- FederatedSetupMixinV8):
- """Test that a V8 driver still passes the same tests."""
-
- def config_overrides(self):
- super(FederatedIdentityProviderTestsV8, self).config_overrides()
- self.useV8driver()
-
- def test_create_idp_remote_repeated(self):
- """Create two IdentityProvider entities with some remote_ids.
-
- A remote_id is the same for both so the second IdP is not
- created because of the uniqueness of the remote_ids
-
- Expect HTTP 409 Conflict code for the latter call.
-
- Note: V9 drivers and later augment the conflict message with
- additional information, which won't be present if we are running
- a V8 driver - so override the newer tests to just ensure a
- conflict message is raised.
- """
- body = self.default_body.copy()
- repeated_remote_id = uuid.uuid4().hex
- body['remote_ids'] = [uuid.uuid4().hex,
- uuid.uuid4().hex,
- uuid.uuid4().hex,
- repeated_remote_id]
- self._create_default_idp(body=body)
-
- url = self.base_url(suffix=uuid.uuid4().hex)
- body['remote_ids'] = [uuid.uuid4().hex,
- repeated_remote_id]
- self.put(url, body={'identity_provider': body},
- expected_status=http_client.CONFLICT)
-
- def test_check_idp_uniqueness(self):
- """Add same IdP twice.
-
- Expect HTTP 409 Conflict code for the latter call.
-
- Note: V9 drivers and later augment the conflict message with
- additional information, which won't be present if we are running
- a V8 driver - so override the newer tests to just ensure a
- conflict message is raised.
- """
- url = self.base_url(suffix=uuid.uuid4().hex)
- body = self._http_idp_input()
- self.put(url, body={'identity_provider': body},
- expected_status=http_client.CREATED)
- self.put(url, body={'identity_provider': body},
- expected_status=http_client.CONFLICT)
-
-
-class MappingCRUDTestsV8(
- test_v3_federation.MappingCRUDTests,
- FederatedSetupMixinV8):
- """Test that a V8 driver still passes the same tests."""
-
- def config_overrides(self):
- super(MappingCRUDTestsV8, self).config_overrides()
- self.useV8driver()
-
-
-class ServiceProviderTestsV8(
- test_v3_federation.ServiceProviderTests,
- FederatedSetupMixinV8):
- """Test that a V8 driver still passes the same tests."""
-
- def config_overrides(self):
- super(ServiceProviderTestsV8, self).config_overrides()
- self.useV8driver()
-
- def test_filter_list_sp_by_id(self):
- self.skipTest('Operation not supported in v8 and earlier drivers')
-
- def test_filter_list_sp_by_enabled(self):
- self.skipTest('Operation not supported in v8 and earlier drivers')
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py
deleted file mode 100644
index 16acbdc3..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import unittest
-
-from keystone.resource.V8_backends import sql
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit.resource import test_backends
-from keystone.tests.unit import test_backend_sql
-
-
-class SqlIdentityV8(test_backend_sql.SqlIdentity):
- """Test that a V8 driver still passes the same tests.
-
- We use the SQL driver as an example of a V8 legacy driver.
-
- """
-
- def config_overrides(self):
- super(SqlIdentityV8, self).config_overrides()
- # V8 SQL specific driver overrides
- self.config_fixture.config(
- group='resource',
- driver='keystone.resource.V8_backends.sql.Resource')
- self.use_specific_sql_driver_version(
- 'keystone.resource', 'backends', 'V8_')
-
- def test_delete_projects_from_ids(self):
- self.skipTest('Operation not supported in v8 and earlier drivers')
-
- def test_delete_projects_from_ids_with_no_existing_project_id(self):
- self.skipTest('Operation not supported in v8 and earlier drivers')
-
- def test_delete_project_cascade(self):
- self.skipTest('Operation not supported in v8 and earlier drivers')
-
- def test_delete_large_project_cascade(self):
- self.skipTest('Operation not supported in v8 and earlier drivers')
-
- def test_hidden_project_domain_root_is_really_hidden(self):
- self.skipTest('Operation not supported in v8 and earlier drivers')
-
-
-class TestSqlResourceDriverV8(unit.BaseTestCase,
- test_backends.ResourceDriverTests):
- def setUp(self):
- super(TestSqlResourceDriverV8, self).setUp()
-
- version_specifiers = {
- 'keystone.resource': {
- 'versionless_backend': 'backends',
- 'versioned_backend': 'V8_backends'
- }
- }
- self.useFixture(database.Database(version_specifiers))
-
- self.driver = sql.Resource()
-
- @unittest.skip('Null domain not allowed.')
- def test_create_project_null_domain(self):
- pass
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py
deleted file mode 100644
index d9378c30..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.tests.unit import test_backend_sql
-
-
-class SqlIdentityV8(test_backend_sql.SqlIdentity):
- """Test that a V8 driver still passes the same tests.
-
- We use the SQL driver as an example of a V8 legacy driver.
-
- """
-
- def config_overrides(self):
- super(SqlIdentityV8, self).config_overrides()
- # V8 SQL specific driver overrides
- self.config_fixture.config(
- group='role',
- driver='keystone.assignment.V8_role_backends.sql.Role')
- self.use_specific_sql_driver_version(
- 'keystone.assignment', 'role_backends', 'V8_')
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/role/__init__.py b/keystone-moon/keystone/tests/unit/backend/role/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/backend/role/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/backend/role/core.py b/keystone-moon/keystone/tests/unit/backend/role/core.py
deleted file mode 100644
index d6e0d65c..00000000
--- a/keystone-moon/keystone/tests/unit/backend/role/core.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-
-
-class RoleTests(object):
-
- def test_get_role_404(self):
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- uuid.uuid4().hex)
-
- def test_create_duplicate_role_name_fails(self):
- role = {'id': 'fake1',
- 'name': 'fake1name'}
- self.role_api.create_role('fake1', role)
- role['id'] = 'fake2'
- self.assertRaises(exception.Conflict,
- self.role_api.create_role,
- 'fake2',
- role)
-
- def test_rename_duplicate_role_name_fails(self):
- role1 = {
- 'id': 'fake1',
- 'name': 'fake1name'
- }
- role2 = {
- 'id': 'fake2',
- 'name': 'fake2name'
- }
- self.role_api.create_role('fake1', role1)
- self.role_api.create_role('fake2', role2)
- role1['name'] = 'fake2name'
- self.assertRaises(exception.Conflict,
- self.role_api.update_role,
- 'fake1',
- role1)
-
- def test_role_crud(self):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role['id'], role)
- role_ref = self.role_api.get_role(role['id'])
- role_ref_dict = {x: role_ref[x] for x in role_ref}
- self.assertDictEqual(role_ref_dict, role)
-
- role['name'] = uuid.uuid4().hex
- updated_role_ref = self.role_api.update_role(role['id'], role)
- role_ref = self.role_api.get_role(role['id'])
- role_ref_dict = {x: role_ref[x] for x in role_ref}
- self.assertDictEqual(role_ref_dict, role)
- self.assertDictEqual(role_ref_dict, updated_role_ref)
-
- self.role_api.delete_role(role['id'])
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- role['id'])
-
- def test_update_role_404(self):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.assertRaises(exception.RoleNotFound,
- self.role_api.update_role,
- role['id'],
- role)
-
- def test_list_roles(self):
- roles = self.role_api.list_roles()
- self.assertEqual(len(default_fixtures.ROLES), len(roles))
- role_ids = set(role['id'] for role in roles)
- expected_role_ids = set(role['id'] for role in default_fixtures.ROLES)
- self.assertEqual(expected_role_ids, role_ids)
-
- @unit.skip_if_cache_disabled('role')
- def test_cache_layer_role_crud(self):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- role_id = role['id']
- # Create role
- self.role_api.create_role(role_id, role)
- role_ref = self.role_api.get_role(role_id)
- updated_role_ref = copy.deepcopy(role_ref)
- updated_role_ref['name'] = uuid.uuid4().hex
- # Update role, bypassing the role api manager
- self.role_api.driver.update_role(role_id, updated_role_ref)
- # Verify get_role still returns old ref
- self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
- # Invalidate Cache
- self.role_api.get_role.invalidate(self.role_api, role_id)
- # Verify get_role returns the new role_ref
- self.assertDictEqual(updated_role_ref,
- self.role_api.get_role(role_id))
- # Update role back to original via the assignment api manager
- self.role_api.update_role(role_id, role_ref)
- # Verify get_role returns the original role ref
- self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
- # Delete role bypassing the role api manager
- self.role_api.driver.delete_role(role_id)
- # Verify get_role still returns the role_ref
- self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
- # Invalidate cache
- self.role_api.get_role.invalidate(self.role_api, role_id)
- # Verify RoleNotFound is now raised
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- role_id)
- # recreate role
- self.role_api.create_role(role_id, role)
- self.role_api.get_role(role_id)
- # delete role via the assignment api manager
- self.role_api.delete_role(role_id)
- # verify RoleNotFound is now raised
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- role_id)
diff --git a/keystone-moon/keystone/tests/unit/backend/role/test_ldap.py b/keystone-moon/keystone/tests/unit/backend/role/test_ldap.py
deleted file mode 100644
index 44f2b612..00000000
--- a/keystone-moon/keystone/tests/unit/backend/role/test_ldap.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# -*- coding: utf-8 -*-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from oslo_config import cfg
-
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit.backend import core_ldap
-from keystone.tests.unit.backend.role import core as core_role
-from keystone.tests.unit import default_fixtures
-
-
-CONF = cfg.CONF
-
-
-class LdapRoleCommon(core_ldap.BaseBackendLdapCommon, core_role.RoleTests):
- """Tests that should be run in every LDAP configuration.
-
- Include additional tests that are unique to LDAP (or need to be overridden)
- which should be run for all the various LDAP configurations we test.
-
- """
- pass
-
-
-class LdapRole(LdapRoleCommon, core_ldap.BaseBackendLdap, unit.TestCase):
- """Test in an all-LDAP configuration.
-
- Include additional tests that are unique to LDAP (or need to be overridden)
- which only need to be run in a basic LDAP configurations.
-
- """
- def test_configurable_allowed_role_actions(self):
- role = {'id': u'fäké1', 'name': u'fäké1'}
- self.role_api.create_role(u'fäké1', role)
- role_ref = self.role_api.get_role(u'fäké1')
- self.assertEqual(u'fäké1', role_ref['id'])
-
- role['name'] = u'fäké2'
- self.role_api.update_role(u'fäké1', role)
-
- self.role_api.delete_role(u'fäké1')
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- u'fäké1')
-
- def test_configurable_forbidden_role_actions(self):
- self.config_fixture.config(
- group='ldap', role_allow_create=False, role_allow_update=False,
- role_allow_delete=False)
- self.load_backends()
-
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.assertRaises(exception.ForbiddenAction,
- self.role_api.create_role,
- role['id'],
- role)
-
- self.role_member['name'] = uuid.uuid4().hex
- self.assertRaises(exception.ForbiddenAction,
- self.role_api.update_role,
- self.role_member['id'],
- self.role_member)
-
- self.assertRaises(exception.ForbiddenAction,
- self.role_api.delete_role,
- self.role_member['id'])
-
- def test_role_filter(self):
- role_ref = self.role_api.get_role(self.role_member['id'])
- self.assertDictEqual(role_ref, self.role_member)
-
- self.config_fixture.config(group='ldap',
- role_filter='(CN=DOES_NOT_MATCH)')
- self.load_backends()
- # NOTE(morganfainberg): CONF.ldap.role_filter will not be
- # dynamically changed at runtime. This invalidate is a work-around for
- # the expectation that it is safe to change config values in tests that
- # could affect what the drivers would return up to the manager. This
- # solves this assumption when working with aggressive (on-create)
- # cache population.
- self.role_api.get_role.invalidate(self.role_api,
- self.role_member['id'])
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- self.role_member['id'])
-
- def test_role_attribute_mapping(self):
- self.config_fixture.config(group='ldap', role_name_attribute='ou')
- self.clear_database()
- self.load_backends()
- self.load_fixtures(default_fixtures)
- # NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be
- # dynamically changed at runtime. This invalidate is a work-around for
- # the expectation that it is safe to change config values in tests that
- # could affect what the drivers would return up to the manager. This
- # solves this assumption when working with aggressive (on-create)
- # cache population.
- self.role_api.get_role.invalidate(self.role_api,
- self.role_member['id'])
- role_ref = self.role_api.get_role(self.role_member['id'])
- self.assertEqual(self.role_member['id'], role_ref['id'])
- self.assertEqual(self.role_member['name'], role_ref['name'])
-
- self.config_fixture.config(group='ldap', role_name_attribute='sn')
- self.load_backends()
- # NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be
- # dynamically changed at runtime. This invalidate is a work-around for
- # the expectation that it is safe to change config values in tests that
- # could affect what the drivers would return up to the manager. This
- # solves this assumption when working with aggressive (on-create)
- # cache population.
- self.role_api.get_role.invalidate(self.role_api,
- self.role_member['id'])
- role_ref = self.role_api.get_role(self.role_member['id'])
- self.assertEqual(self.role_member['id'], role_ref['id'])
- self.assertNotIn('name', role_ref)
-
- def test_role_attribute_ignore(self):
- self.config_fixture.config(group='ldap',
- role_attribute_ignore=['name'])
- self.clear_database()
- self.load_backends()
- self.load_fixtures(default_fixtures)
- # NOTE(morganfainberg): CONF.ldap.role_attribute_ignore will not be
- # dynamically changed at runtime. This invalidate is a work-around for
- # the expectation that it is safe to change config values in tests that
- # could affect what the drivers would return up to the manager. This
- # solves this assumption when working with aggressive (on-create)
- # cache population.
- self.role_api.get_role.invalidate(self.role_api,
- self.role_member['id'])
- role_ref = self.role_api.get_role(self.role_member['id'])
- self.assertEqual(self.role_member['id'], role_ref['id'])
- self.assertNotIn('name', role_ref)
-
-
-class LdapIdentitySqlEverythingElseRole(
- core_ldap.BaseBackendLdapIdentitySqlEverythingElse, LdapRoleCommon,
- unit.TestCase):
- """Test Identity in LDAP, Everything else in SQL."""
- pass
-
-
-class LdapIdentitySqlEverythingElseWithMappingRole(
- LdapIdentitySqlEverythingElseRole,
- core_ldap.BaseBackendLdapIdentitySqlEverythingElseWithMapping):
- """Test ID mapping of default LDAP backend."""
- pass
diff --git a/keystone-moon/keystone/tests/unit/backend/role/test_sql.py b/keystone-moon/keystone/tests/unit/backend/role/test_sql.py
deleted file mode 100644
index 79ff148a..00000000
--- a/keystone-moon/keystone/tests/unit/backend/role/test_sql.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from keystone.common import sql
-from keystone import exception
-from keystone.tests.unit.backend import core_sql
-from keystone.tests.unit.backend.role import core
-
-
-class SqlRoleModels(core_sql.BaseBackendSqlModels):
-
- def test_role_model(self):
- cols = (('id', sql.String, 64),
- ('name', sql.String, 255))
- self.assertExpectedSchema('role', cols)
-
-
-class SqlRole(core_sql.BaseBackendSqlTests, core.RoleTests):
-
- def test_create_null_role_name(self):
- role = {'id': uuid.uuid4().hex,
- 'name': None}
- self.assertRaises(exception.UnexpectedError,
- self.role_api.create_role,
- role['id'],
- role)
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- role['id'])
diff --git a/keystone-moon/keystone/tests/unit/catalog/__init__.py b/keystone-moon/keystone/tests/unit/catalog/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/catalog/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/catalog/test_backends.py b/keystone-moon/keystone/tests/unit/catalog/test_backends.py
deleted file mode 100644
index 55898015..00000000
--- a/keystone-moon/keystone/tests/unit/catalog/test_backends.py
+++ /dev/null
@@ -1,588 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-import mock
-from six.moves import range
-from testtools import matchers
-
-from keystone.catalog import core
-from keystone.common import driver_hints
-from keystone import exception
-from keystone.tests import unit
-
-
-class CatalogTests(object):
-
- _legacy_endpoint_id_in_endpoint = True
- _enabled_default_to_true_when_creating_endpoint = False
-
- def test_region_crud(self):
- # create
- region_id = '0' * 255
- new_region = unit.new_region_ref(id=region_id)
- res = self.catalog_api.create_region(new_region)
-
- # Ensure that we don't need to have a
- # parent_region_id in the original supplied
- # ref dict, but that it will be returned from
- # the endpoint, with None value.
- expected_region = new_region.copy()
- expected_region['parent_region_id'] = None
- self.assertDictEqual(expected_region, res)
-
- # Test adding another region with the one above
- # as its parent. We will check below whether deleting
- # the parent successfully deletes any child regions.
- parent_region_id = region_id
- new_region = unit.new_region_ref(parent_region_id=parent_region_id)
- region_id = new_region['id']
- res = self.catalog_api.create_region(new_region)
- self.assertDictEqual(new_region, res)
-
- # list
- regions = self.catalog_api.list_regions()
- self.assertThat(regions, matchers.HasLength(2))
- region_ids = [x['id'] for x in regions]
- self.assertIn(parent_region_id, region_ids)
- self.assertIn(region_id, region_ids)
-
- # update
- region_desc_update = {'description': uuid.uuid4().hex}
- res = self.catalog_api.update_region(region_id, region_desc_update)
- expected_region = new_region.copy()
- expected_region['description'] = region_desc_update['description']
- self.assertDictEqual(expected_region, res)
-
- # delete
- self.catalog_api.delete_region(parent_region_id)
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.delete_region,
- parent_region_id)
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- parent_region_id)
- # Ensure the child is also gone...
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- region_id)
-
- def _create_region_with_parent_id(self, parent_id=None):
- new_region = unit.new_region_ref(parent_region_id=parent_id)
- self.catalog_api.create_region(new_region)
- return new_region
-
- def test_list_regions_filtered_by_parent_region_id(self):
- new_region = self._create_region_with_parent_id()
- parent_id = new_region['id']
- new_region = self._create_region_with_parent_id(parent_id)
- new_region = self._create_region_with_parent_id(parent_id)
-
- # filter by parent_region_id
- hints = driver_hints.Hints()
- hints.add_filter('parent_region_id', parent_id)
- regions = self.catalog_api.list_regions(hints)
- for region in regions:
- self.assertEqual(parent_id, region['parent_region_id'])
-
- @unit.skip_if_cache_disabled('catalog')
- def test_cache_layer_region_crud(self):
- new_region = unit.new_region_ref()
- region_id = new_region['id']
- self.catalog_api.create_region(new_region.copy())
- updated_region = copy.deepcopy(new_region)
- updated_region['description'] = uuid.uuid4().hex
- # cache the result
- self.catalog_api.get_region(region_id)
- # update the region bypassing catalog_api
- self.catalog_api.driver.update_region(region_id, updated_region)
- self.assertDictContainsSubset(new_region,
- self.catalog_api.get_region(region_id))
- self.catalog_api.get_region.invalidate(self.catalog_api, region_id)
- self.assertDictContainsSubset(updated_region,
- self.catalog_api.get_region(region_id))
- # delete the region
- self.catalog_api.driver.delete_region(region_id)
- # still get the old region
- self.assertDictContainsSubset(updated_region,
- self.catalog_api.get_region(region_id))
- self.catalog_api.get_region.invalidate(self.catalog_api, region_id)
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region, region_id)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_invalidate_cache_when_updating_region(self):
- new_region = unit.new_region_ref()
- region_id = new_region['id']
- self.catalog_api.create_region(new_region)
-
- # cache the region
- self.catalog_api.get_region(region_id)
-
- # update the region via catalog_api
- new_description = {'description': uuid.uuid4().hex}
- self.catalog_api.update_region(region_id, new_description)
-
- # assert that we can get the new region
- current_region = self.catalog_api.get_region(region_id)
- self.assertEqual(new_description['description'],
- current_region['description'])
-
- def test_create_region_with_duplicate_id(self):
- new_region = unit.new_region_ref()
- self.catalog_api.create_region(new_region)
- # Create region again with duplicate id
- self.assertRaises(exception.Conflict,
- self.catalog_api.create_region,
- new_region)
-
- def test_get_region_returns_not_found(self):
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- uuid.uuid4().hex)
-
- def test_delete_region_returns_not_found(self):
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.delete_region,
- uuid.uuid4().hex)
-
- def test_create_region_invalid_parent_region_returns_not_found(self):
- new_region = unit.new_region_ref(parent_region_id='nonexisting')
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.create_region,
- new_region)
-
- def test_avoid_creating_circular_references_in_regions_update(self):
- region_one = self._create_region_with_parent_id()
-
- # self circle: region_one->region_one
- self.assertRaises(exception.CircularRegionHierarchyError,
- self.catalog_api.update_region,
- region_one['id'],
- {'parent_region_id': region_one['id']})
-
- # region_one->region_two->region_one
- region_two = self._create_region_with_parent_id(region_one['id'])
- self.assertRaises(exception.CircularRegionHierarchyError,
- self.catalog_api.update_region,
- region_one['id'],
- {'parent_region_id': region_two['id']})
-
- # region_one region_two->region_three->region_four->region_two
- region_three = self._create_region_with_parent_id(region_two['id'])
- region_four = self._create_region_with_parent_id(region_three['id'])
- self.assertRaises(exception.CircularRegionHierarchyError,
- self.catalog_api.update_region,
- region_two['id'],
- {'parent_region_id': region_four['id']})
-
- @mock.patch.object(core.CatalogDriverV8,
- "_ensure_no_circle_in_hierarchical_regions")
- def test_circular_regions_can_be_deleted(self, mock_ensure_on_circle):
- # turn off the enforcement so that cycles can be created for the test
- mock_ensure_on_circle.return_value = None
-
- region_one = self._create_region_with_parent_id()
-
- # self circle: region_one->region_one
- self.catalog_api.update_region(
- region_one['id'],
- {'parent_region_id': region_one['id']})
- self.catalog_api.delete_region(region_one['id'])
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- region_one['id'])
-
- # region_one->region_two->region_one
- region_one = self._create_region_with_parent_id()
- region_two = self._create_region_with_parent_id(region_one['id'])
- self.catalog_api.update_region(
- region_one['id'],
- {'parent_region_id': region_two['id']})
- self.catalog_api.delete_region(region_one['id'])
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- region_one['id'])
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- region_two['id'])
-
- # region_one->region_two->region_three->region_one
- region_one = self._create_region_with_parent_id()
- region_two = self._create_region_with_parent_id(region_one['id'])
- region_three = self._create_region_with_parent_id(region_two['id'])
- self.catalog_api.update_region(
- region_one['id'],
- {'parent_region_id': region_three['id']})
- self.catalog_api.delete_region(region_two['id'])
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- region_two['id'])
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- region_one['id'])
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- region_three['id'])
-
- def test_service_crud(self):
- # create
- new_service = unit.new_service_ref()
- service_id = new_service['id']
- res = self.catalog_api.create_service(service_id, new_service)
- self.assertDictEqual(new_service, res)
-
- # list
- services = self.catalog_api.list_services()
- self.assertIn(service_id, [x['id'] for x in services])
-
- # update
- service_name_update = {'name': uuid.uuid4().hex}
- res = self.catalog_api.update_service(service_id, service_name_update)
- expected_service = new_service.copy()
- expected_service['name'] = service_name_update['name']
- self.assertDictEqual(expected_service, res)
-
- # delete
- self.catalog_api.delete_service(service_id)
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.delete_service,
- service_id)
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.get_service,
- service_id)
-
- def _create_random_service(self):
- new_service = unit.new_service_ref()
- service_id = new_service['id']
- return self.catalog_api.create_service(service_id, new_service)
-
- def test_service_filtering(self):
- target_service = self._create_random_service()
- unrelated_service1 = self._create_random_service()
- unrelated_service2 = self._create_random_service()
-
- # filter by type
- hint_for_type = driver_hints.Hints()
- hint_for_type.add_filter(name="type", value=target_service['type'])
- services = self.catalog_api.list_services(hint_for_type)
-
- self.assertEqual(1, len(services))
- filtered_service = services[0]
- self.assertEqual(target_service['type'], filtered_service['type'])
- self.assertEqual(target_service['id'], filtered_service['id'])
-
- # filter should have been removed, since it was already used by the
- # backend
- self.assertEqual(0, len(hint_for_type.filters))
-
- # the backend shouldn't filter by name, since this is handled by the
- # front end
- hint_for_name = driver_hints.Hints()
- hint_for_name.add_filter(name="name", value=target_service['name'])
- services = self.catalog_api.list_services(hint_for_name)
-
- self.assertEqual(3, len(services))
-
- # filter should still be there, since it wasn't used by the backend
- self.assertEqual(1, len(hint_for_name.filters))
-
- self.catalog_api.delete_service(target_service['id'])
- self.catalog_api.delete_service(unrelated_service1['id'])
- self.catalog_api.delete_service(unrelated_service2['id'])
-
- @unit.skip_if_cache_disabled('catalog')
- def test_cache_layer_service_crud(self):
- new_service = unit.new_service_ref()
- service_id = new_service['id']
- res = self.catalog_api.create_service(service_id, new_service)
- self.assertDictEqual(new_service, res)
- self.catalog_api.get_service(service_id)
- updated_service = copy.deepcopy(new_service)
- updated_service['description'] = uuid.uuid4().hex
- # update bypassing catalog api
- self.catalog_api.driver.update_service(service_id, updated_service)
- self.assertDictContainsSubset(new_service,
- self.catalog_api.get_service(service_id))
- self.catalog_api.get_service.invalidate(self.catalog_api, service_id)
- self.assertDictContainsSubset(updated_service,
- self.catalog_api.get_service(service_id))
-
- # delete bypassing catalog api
- self.catalog_api.driver.delete_service(service_id)
- self.assertDictContainsSubset(updated_service,
- self.catalog_api.get_service(service_id))
- self.catalog_api.get_service.invalidate(self.catalog_api, service_id)
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.delete_service,
- service_id)
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.get_service,
- service_id)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_invalidate_cache_when_updating_service(self):
- new_service = unit.new_service_ref()
- service_id = new_service['id']
- self.catalog_api.create_service(service_id, new_service)
-
- # cache the service
- self.catalog_api.get_service(service_id)
-
- # update the service via catalog api
- new_type = {'type': uuid.uuid4().hex}
- self.catalog_api.update_service(service_id, new_type)
-
- # assert that we can get the new service
- current_service = self.catalog_api.get_service(service_id)
- self.assertEqual(new_type['type'], current_service['type'])
-
- def test_delete_service_with_endpoint(self):
- # create a service
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- # create an endpoint attached to the service
- endpoint = unit.new_endpoint_ref(service_id=service['id'],
- region_id=None)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
-
- # deleting the service should also delete the endpoint
- self.catalog_api.delete_service(service['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- endpoint['id'])
-
- def test_cache_layer_delete_service_with_endpoint(self):
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- # create an endpoint attached to the service
- endpoint = unit.new_endpoint_ref(service_id=service['id'],
- region_id=None)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
- # cache the result
- self.catalog_api.get_service(service['id'])
- self.catalog_api.get_endpoint(endpoint['id'])
- # delete the service bypassing catalog api
- self.catalog_api.driver.delete_service(service['id'])
- self.assertDictContainsSubset(endpoint,
- self.catalog_api.
- get_endpoint(endpoint['id']))
- self.assertDictContainsSubset(service,
- self.catalog_api.
- get_service(service['id']))
- self.catalog_api.get_endpoint.invalidate(self.catalog_api,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- endpoint['id'])
- # multiple endpoints associated with a service
- second_endpoint = unit.new_endpoint_ref(service_id=service['id'],
- region_id=None)
- self.catalog_api.create_service(service['id'], service)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
- self.catalog_api.create_endpoint(second_endpoint['id'],
- second_endpoint)
- self.catalog_api.delete_service(service['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- second_endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- second_endpoint['id'])
-
- def test_get_service_returns_not_found(self):
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.get_service,
- uuid.uuid4().hex)
-
- def test_delete_service_returns_not_found(self):
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.delete_service,
- uuid.uuid4().hex)
-
- def test_create_endpoint_nonexistent_service(self):
- endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex,
- region_id=None)
- self.assertRaises(exception.ValidationError,
- self.catalog_api.create_endpoint,
- endpoint['id'],
- endpoint)
-
- def test_update_endpoint_nonexistent_service(self):
- dummy_service, enabled_endpoint, dummy_disabled_endpoint = (
- self._create_endpoints())
- new_endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex)
- self.assertRaises(exception.ValidationError,
- self.catalog_api.update_endpoint,
- enabled_endpoint['id'],
- new_endpoint)
-
- def test_create_endpoint_nonexistent_region(self):
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- endpoint = unit.new_endpoint_ref(service_id=service['id'])
- self.assertRaises(exception.ValidationError,
- self.catalog_api.create_endpoint,
- endpoint['id'],
- endpoint)
-
- def test_update_endpoint_nonexistent_region(self):
- dummy_service, enabled_endpoint, dummy_disabled_endpoint = (
- self._create_endpoints())
- new_endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex)
- self.assertRaises(exception.ValidationError,
- self.catalog_api.update_endpoint,
- enabled_endpoint['id'],
- new_endpoint)
-
- def test_get_endpoint_returns_not_found(self):
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- uuid.uuid4().hex)
-
- def test_delete_endpoint_returns_not_found(self):
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- uuid.uuid4().hex)
-
- def test_create_endpoint(self):
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- endpoint = unit.new_endpoint_ref(service_id=service['id'],
- region_id=None)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
-
- def test_update_endpoint(self):
- dummy_service_ref, endpoint_ref, dummy_disabled_endpoint_ref = (
- self._create_endpoints())
- res = self.catalog_api.update_endpoint(endpoint_ref['id'],
- {'interface': 'private'})
- expected_endpoint = endpoint_ref.copy()
- expected_endpoint['enabled'] = True
- expected_endpoint['interface'] = 'private'
- if self._legacy_endpoint_id_in_endpoint:
- expected_endpoint['legacy_endpoint_id'] = None
- if self._enabled_default_to_true_when_creating_endpoint:
- expected_endpoint['enabled'] = True
- self.assertDictEqual(expected_endpoint, res)
-
- def _create_endpoints(self):
- # Creates a service and 2 endpoints for the service in the same region.
- # The 'public' interface is enabled and the 'internal' interface is
- # disabled.
-
- def create_endpoint(service_id, region, **kwargs):
- ref = unit.new_endpoint_ref(
- service_id=service_id,
- region_id=region,
- url='http://localhost/%s' % uuid.uuid4().hex,
- **kwargs)
-
- self.catalog_api.create_endpoint(ref['id'], ref)
- return ref
-
- # Create a service for use with the endpoints.
- service_ref = unit.new_service_ref()
- service_id = service_ref['id']
- self.catalog_api.create_service(service_id, service_ref)
-
- region = unit.new_region_ref()
- self.catalog_api.create_region(region)
-
- # Create endpoints
- enabled_endpoint_ref = create_endpoint(service_id, region['id'])
- disabled_endpoint_ref = create_endpoint(
- service_id, region['id'], enabled=False, interface='internal')
-
- return service_ref, enabled_endpoint_ref, disabled_endpoint_ref
-
- def test_list_endpoints(self):
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- expected_ids = set([uuid.uuid4().hex for _ in range(3)])
- for endpoint_id in expected_ids:
- endpoint = unit.new_endpoint_ref(service_id=service['id'],
- id=endpoint_id,
- region_id=None)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
-
- endpoints = self.catalog_api.list_endpoints()
- self.assertEqual(expected_ids, set(e['id'] for e in endpoints))
-
- def test_get_catalog_endpoint_disabled(self):
- """Get back only enabled endpoints when get the v2 catalog."""
- service_ref, enabled_endpoint_ref, dummy_disabled_endpoint_ref = (
- self._create_endpoints())
-
- user_id = uuid.uuid4().hex
- project_id = uuid.uuid4().hex
- catalog = self.catalog_api.get_catalog(user_id, project_id)
-
- exp_entry = {
- 'id': enabled_endpoint_ref['id'],
- 'name': service_ref['name'],
- 'publicURL': enabled_endpoint_ref['url'],
- }
-
- region = enabled_endpoint_ref['region_id']
- self.assertEqual(exp_entry, catalog[region][service_ref['type']])
-
- def test_get_v3_catalog_endpoint_disabled(self):
- """Get back only enabled endpoints when get the v3 catalog."""
- enabled_endpoint_ref = self._create_endpoints()[1]
-
- user_id = uuid.uuid4().hex
- project_id = uuid.uuid4().hex
- catalog = self.catalog_api.get_v3_catalog(user_id, project_id)
-
- endpoint_ids = [x['id'] for x in catalog[0]['endpoints']]
- self.assertEqual([enabled_endpoint_ref['id']], endpoint_ids)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_invalidate_cache_when_updating_endpoint(self):
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- # create an endpoint attached to the service
- endpoint = unit.new_endpoint_ref(service_id=service['id'],
- region_id=None)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
-
- # cache the endpoint
- self.catalog_api.get_endpoint(endpoint['id'])
-
- # update the endpoint via catalog api
- new_url = {'url': uuid.uuid4().hex}
- self.catalog_api.update_endpoint(endpoint['id'], new_url)
-
- # assert that we can get the new endpoint
- current_endpoint = self.catalog_api.get_endpoint(endpoint['id'])
- self.assertEqual(new_url['url'], current_endpoint['url'])
diff --git a/keystone-moon/keystone/tests/unit/catalog/test_core.py b/keystone-moon/keystone/tests/unit/catalog/test_core.py
deleted file mode 100644
index b04b0bb7..00000000
--- a/keystone-moon/keystone/tests/unit/catalog/test_core.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from keystone.catalog import core
-from keystone import exception
-from keystone.tests import unit
-
-
-class FormatUrlTests(unit.BaseTestCase):
-
- def test_successful_formatting(self):
- url_template = ('http://$(public_bind_host)s:$(admin_port)d/'
- '$(tenant_id)s/$(user_id)s/$(project_id)s')
- project_id = uuid.uuid4().hex
- values = {'public_bind_host': 'server', 'admin_port': 9090,
- 'tenant_id': 'A', 'user_id': 'B', 'project_id': project_id}
- actual_url = core.format_url(url_template, values)
-
- expected_url = 'http://server:9090/A/B/%s' % (project_id,)
- self.assertEqual(expected_url, actual_url)
-
- def test_raises_malformed_on_missing_key(self):
- self.assertRaises(exception.MalformedEndpoint,
- core.format_url,
- "http://$(public_bind_host)s/$(public_port)d",
- {"public_bind_host": "1"})
-
- def test_raises_malformed_on_wrong_type(self):
- self.assertRaises(exception.MalformedEndpoint,
- core.format_url,
- "http://$(public_bind_host)d",
- {"public_bind_host": "something"})
-
- def test_raises_malformed_on_incomplete_format(self):
- self.assertRaises(exception.MalformedEndpoint,
- core.format_url,
- "http://$(public_bind_host)",
- {"public_bind_host": "1"})
-
- def test_formatting_a_non_string(self):
- def _test(url_template):
- self.assertRaises(exception.MalformedEndpoint,
- core.format_url,
- url_template,
- {})
-
- _test(None)
- _test(object())
-
- def test_substitution_with_key_not_allowed(self):
- # If the url template contains a substitution that's not in the allowed
- # list then MalformedEndpoint is raised.
- # For example, admin_token isn't allowed.
- url_template = ('http://$(public_bind_host)s:$(public_port)d/'
- '$(tenant_id)s/$(user_id)s/$(admin_token)s')
- values = {'public_bind_host': 'server', 'public_port': 9090,
- 'tenant_id': 'A', 'user_id': 'B', 'admin_token': 'C'}
- self.assertRaises(exception.MalformedEndpoint,
- core.format_url,
- url_template,
- values)
-
- def test_substitution_with_allowed_tenant_keyerror(self):
- # No value of 'tenant_id' is passed into url_template.
- # mod: format_url will return None instead of raising
- # "MalformedEndpoint" exception.
- # This is intentional behavior since we don't want to skip
- # all the later endpoints once there is an URL of endpoint
- # trying to replace 'tenant_id' with None.
- url_template = ('http://$(public_bind_host)s:$(admin_port)d/'
- '$(tenant_id)s/$(user_id)s')
- values = {'public_bind_host': 'server', 'admin_port': 9090,
- 'user_id': 'B'}
- self.assertIsNone(core.format_url(url_template, values,
- silent_keyerror_failures=['tenant_id']))
-
- def test_substitution_with_allowed_project_keyerror(self):
- # No value of 'project_id' is passed into url_template.
- # mod: format_url will return None instead of raising
- # "MalformedEndpoint" exception.
- # This is intentional behavior since we don't want to skip
- # all the later endpoints once there is an URL of endpoint
- # trying to replace 'project_id' with None.
- url_template = ('http://$(public_bind_host)s:$(admin_port)d/'
- '$(project_id)s/$(user_id)s')
- values = {'public_bind_host': 'server', 'admin_port': 9090,
- 'user_id': 'B'}
- self.assertIsNone(core.format_url(url_template, values,
- silent_keyerror_failures=['project_id']))
diff --git a/keystone-moon/keystone/tests/unit/common/__init__.py b/keystone-moon/keystone/tests/unit/common/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/common/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/common/test_authorization.py b/keystone-moon/keystone/tests/unit/common/test_authorization.py
deleted file mode 100644
index 73ddbc61..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_authorization.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Copyright 2015 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import copy
-import uuid
-
-from keystone.common import authorization
-from keystone import exception
-from keystone.federation import constants as federation_constants
-from keystone.models import token_model
-from keystone.tests import unit
-from keystone.tests.unit import test_token_provider
-
-
-class TestTokenToAuthContext(unit.BaseTestCase):
- def test_token_is_project_scoped_with_trust(self):
- # Check auth_context result when the token is project-scoped and has
- # trust info.
-
- # SAMPLE_V3_TOKEN has OS-TRUST:trust in it.
- token_data = test_token_provider.SAMPLE_V3_TOKEN
- token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=token_data)
-
- auth_context = authorization.token_to_auth_context(token)
-
- self.assertEqual(token, auth_context['token'])
- self.assertTrue(auth_context['is_delegated_auth'])
- self.assertEqual(token_data['token']['user']['id'],
- auth_context['user_id'])
- self.assertEqual(token_data['token']['user']['domain']['id'],
- auth_context['user_domain_id'])
- self.assertEqual(token_data['token']['project']['id'],
- auth_context['project_id'])
- self.assertEqual(token_data['token']['project']['domain']['id'],
- auth_context['project_domain_id'])
- self.assertNotIn('domain_id', auth_context)
- self.assertNotIn('domain_name', auth_context)
- self.assertEqual(token_data['token']['OS-TRUST:trust']['id'],
- auth_context['trust_id'])
- self.assertEqual(
- token_data['token']['OS-TRUST:trust']['trustor_user_id'],
- auth_context['trustor_id'])
- self.assertEqual(
- token_data['token']['OS-TRUST:trust']['trustee_user_id'],
- auth_context['trustee_id'])
- self.assertItemsEqual(
- [r['name'] for r in token_data['token']['roles']],
- auth_context['roles'])
- self.assertIsNone(auth_context['consumer_id'])
- self.assertIsNone(auth_context['access_token_id'])
- self.assertNotIn('group_ids', auth_context)
-
- def test_token_is_domain_scoped(self):
- # Check contents of auth_context when token is domain-scoped.
- token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
- del token_data['token']['project']
-
- domain_id = uuid.uuid4().hex
- domain_name = uuid.uuid4().hex
- token_data['token']['domain'] = {'id': domain_id, 'name': domain_name}
-
- token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=token_data)
-
- auth_context = authorization.token_to_auth_context(token)
-
- self.assertNotIn('project_id', auth_context)
- self.assertNotIn('project_domain_id', auth_context)
-
- self.assertEqual(domain_id, auth_context['domain_id'])
- self.assertEqual(domain_name, auth_context['domain_name'])
-
- def test_token_is_unscoped(self):
- # Check contents of auth_context when the token is unscoped.
- token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
- del token_data['token']['project']
-
- token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=token_data)
-
- auth_context = authorization.token_to_auth_context(token)
-
- self.assertNotIn('project_id', auth_context)
- self.assertNotIn('project_domain_id', auth_context)
- self.assertNotIn('domain_id', auth_context)
- self.assertNotIn('domain_name', auth_context)
-
- def test_token_is_for_federated_user(self):
- # When the token is for a federated user then group_ids is in
- # auth_context.
- token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
-
- group_ids = [uuid.uuid4().hex for x in range(1, 5)]
-
- federation_data = {'identity_provider': {'id': uuid.uuid4().hex},
- 'protocol': {'id': 'saml2'},
- 'groups': [{'id': gid} for gid in group_ids]}
- token_data['token']['user'][federation_constants.FEDERATION] = (
- federation_data)
-
- token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=token_data)
-
- auth_context = authorization.token_to_auth_context(token)
-
- self.assertItemsEqual(group_ids, auth_context['group_ids'])
-
- def test_oauth_variables_set_for_oauth_token(self):
- token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
- access_token_id = uuid.uuid4().hex
- consumer_id = uuid.uuid4().hex
- token_data['token']['OS-OAUTH1'] = {'access_token_id': access_token_id,
- 'consumer_id': consumer_id}
- token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=token_data)
-
- auth_context = authorization.token_to_auth_context(token)
-
- self.assertEqual(access_token_id, auth_context['access_token_id'])
- self.assertEqual(consumer_id, auth_context['consumer_id'])
-
- def test_oauth_variables_not_set(self):
- token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
- token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=token_data)
-
- auth_context = authorization.token_to_auth_context(token)
-
- self.assertIsNone(auth_context['access_token_id'])
- self.assertIsNone(auth_context['consumer_id'])
-
- def test_token_is_not_KeystoneToken_raises_exception(self):
- # If the token isn't a KeystoneToken then an UnexpectedError exception
- # is raised.
- self.assertRaises(exception.UnexpectedError,
- authorization.token_to_auth_context, {})
-
- def test_user_id_missing_in_token_raises_exception(self):
- # If there's no user ID in the token then an Unauthorized
- # exception is raised.
- token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
- del token_data['token']['user']['id']
-
- token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=token_data)
-
- self.assertRaises(exception.Unauthorized,
- authorization.token_to_auth_context, token)
diff --git a/keystone-moon/keystone/tests/unit/common/test_base64utils.py b/keystone-moon/keystone/tests/unit/common/test_base64utils.py
deleted file mode 100644
index 355a2e03..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_base64utils.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# Copyright 2013 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.common import base64utils
-from keystone.tests import unit
-
-base64_alphabet = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
- 'abcdefghijklmnopqrstuvwxyz'
- '0123456789'
- '+/=') # includes pad char
-
-base64url_alphabet = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
- 'abcdefghijklmnopqrstuvwxyz'
- '0123456789'
- '-_=') # includes pad char
-
-
-class TestValid(unit.BaseTestCase):
- def test_valid_base64(self):
- self.assertTrue(base64utils.is_valid_base64('+/=='))
- self.assertTrue(base64utils.is_valid_base64('+/+='))
- self.assertTrue(base64utils.is_valid_base64('+/+/'))
-
- self.assertFalse(base64utils.is_valid_base64('-_=='))
- self.assertFalse(base64utils.is_valid_base64('-_-='))
- self.assertFalse(base64utils.is_valid_base64('-_-_'))
-
- self.assertTrue(base64utils.is_valid_base64('abcd'))
- self.assertFalse(base64utils.is_valid_base64('abcde'))
- self.assertFalse(base64utils.is_valid_base64('abcde=='))
- self.assertFalse(base64utils.is_valid_base64('abcdef'))
- self.assertTrue(base64utils.is_valid_base64('abcdef=='))
- self.assertFalse(base64utils.is_valid_base64('abcdefg'))
- self.assertTrue(base64utils.is_valid_base64('abcdefg='))
- self.assertTrue(base64utils.is_valid_base64('abcdefgh'))
-
- self.assertFalse(base64utils.is_valid_base64('-_=='))
-
- def test_valid_base64url(self):
- self.assertFalse(base64utils.is_valid_base64url('+/=='))
- self.assertFalse(base64utils.is_valid_base64url('+/+='))
- self.assertFalse(base64utils.is_valid_base64url('+/+/'))
-
- self.assertTrue(base64utils.is_valid_base64url('-_=='))
- self.assertTrue(base64utils.is_valid_base64url('-_-='))
- self.assertTrue(base64utils.is_valid_base64url('-_-_'))
-
- self.assertTrue(base64utils.is_valid_base64url('abcd'))
- self.assertFalse(base64utils.is_valid_base64url('abcde'))
- self.assertFalse(base64utils.is_valid_base64url('abcde=='))
- self.assertFalse(base64utils.is_valid_base64url('abcdef'))
- self.assertTrue(base64utils.is_valid_base64url('abcdef=='))
- self.assertFalse(base64utils.is_valid_base64url('abcdefg'))
- self.assertTrue(base64utils.is_valid_base64url('abcdefg='))
- self.assertTrue(base64utils.is_valid_base64url('abcdefgh'))
-
- self.assertTrue(base64utils.is_valid_base64url('-_=='))
-
-
-class TestBase64Padding(unit.BaseTestCase):
-
- def test_filter(self):
- self.assertEqual('', base64utils.filter_formatting(''))
- self.assertEqual('', base64utils.filter_formatting(' '))
- self.assertEqual('a', base64utils.filter_formatting('a'))
- self.assertEqual('a', base64utils.filter_formatting(' a'))
- self.assertEqual('a', base64utils.filter_formatting('a '))
- self.assertEqual('ab', base64utils.filter_formatting('ab'))
- self.assertEqual('ab', base64utils.filter_formatting(' ab'))
- self.assertEqual('ab', base64utils.filter_formatting('ab '))
- self.assertEqual('ab', base64utils.filter_formatting('a b'))
- self.assertEqual('ab', base64utils.filter_formatting(' a b'))
- self.assertEqual('ab', base64utils.filter_formatting('a b '))
- self.assertEqual('ab', base64utils.filter_formatting('a\nb\n '))
-
- text = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
- 'abcdefghijklmnopqrstuvwxyz'
- '0123456789'
- '+/=')
- self.assertEqual(base64_alphabet,
- base64utils.filter_formatting(text))
-
- text = (' ABCDEFGHIJKLMNOPQRSTUVWXYZ\n'
- ' abcdefghijklmnopqrstuvwxyz\n'
- '\t\f\r'
- ' 0123456789\n'
- ' +/=')
- self.assertEqual(base64_alphabet,
- base64utils.filter_formatting(text))
- self.assertEqual(base64url_alphabet,
- base64utils.base64_to_base64url(base64_alphabet))
-
- text = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
- 'abcdefghijklmnopqrstuvwxyz'
- '0123456789'
- '-_=')
- self.assertEqual(base64url_alphabet,
- base64utils.filter_formatting(text))
-
- text = (' ABCDEFGHIJKLMNOPQRSTUVWXYZ\n'
- ' abcdefghijklmnopqrstuvwxyz\n'
- '\t\f\r'
- ' 0123456789\n'
- '-_=')
- self.assertEqual(base64url_alphabet,
- base64utils.filter_formatting(text))
-
- def test_alphabet_conversion(self):
- self.assertEqual(base64url_alphabet,
- base64utils.base64_to_base64url(base64_alphabet))
-
- self.assertEqual(base64_alphabet,
- base64utils.base64url_to_base64(base64url_alphabet))
-
- def test_is_padded(self):
- self.assertTrue(base64utils.base64_is_padded('ABCD'))
- self.assertTrue(base64utils.base64_is_padded('ABC='))
- self.assertTrue(base64utils.base64_is_padded('AB=='))
-
- self.assertTrue(base64utils.base64_is_padded('1234ABCD'))
- self.assertTrue(base64utils.base64_is_padded('1234ABC='))
- self.assertTrue(base64utils.base64_is_padded('1234AB=='))
-
- self.assertFalse(base64utils.base64_is_padded('ABC'))
- self.assertFalse(base64utils.base64_is_padded('AB'))
- self.assertFalse(base64utils.base64_is_padded('A'))
- self.assertFalse(base64utils.base64_is_padded(''))
-
- self.assertRaises(base64utils.InvalidBase64Error,
- base64utils.base64_is_padded, '=')
-
- self.assertRaises(base64utils.InvalidBase64Error,
- base64utils.base64_is_padded, 'AB=C')
-
- self.assertRaises(base64utils.InvalidBase64Error,
- base64utils.base64_is_padded, 'AB=')
-
- self.assertRaises(base64utils.InvalidBase64Error,
- base64utils.base64_is_padded, 'ABCD=')
-
- self.assertRaises(ValueError, base64utils.base64_is_padded,
- 'ABC', pad='==')
- self.assertRaises(base64utils.InvalidBase64Error,
- base64utils.base64_is_padded, 'A=BC')
-
- def test_strip_padding(self):
- self.assertEqual('ABCD', base64utils.base64_strip_padding('ABCD'))
- self.assertEqual('ABC', base64utils.base64_strip_padding('ABC='))
- self.assertEqual('AB', base64utils.base64_strip_padding('AB=='))
- self.assertRaises(ValueError, base64utils.base64_strip_padding,
- 'ABC=', pad='==')
- self.assertEqual('ABC', base64utils.base64_strip_padding('ABC'))
-
- def test_assure_padding(self):
- self.assertEqual('ABCD', base64utils.base64_assure_padding('ABCD'))
- self.assertEqual('ABC=', base64utils.base64_assure_padding('ABC'))
- self.assertEqual('ABC=', base64utils.base64_assure_padding('ABC='))
- self.assertEqual('AB==', base64utils.base64_assure_padding('AB'))
- self.assertEqual('AB==', base64utils.base64_assure_padding('AB=='))
- self.assertRaises(ValueError, base64utils.base64_assure_padding,
- 'ABC', pad='==')
-
- def test_base64_percent_encoding(self):
- self.assertEqual('ABCD', base64utils.base64url_percent_encode('ABCD'))
- self.assertEqual('ABC%3D',
- base64utils.base64url_percent_encode('ABC='))
- self.assertEqual('AB%3D%3D',
- base64utils.base64url_percent_encode('AB=='))
-
- self.assertEqual('ABCD', base64utils.base64url_percent_decode('ABCD'))
- self.assertEqual('ABC=',
- base64utils.base64url_percent_decode('ABC%3D'))
- self.assertEqual('AB==',
- base64utils.base64url_percent_decode('AB%3D%3D'))
- self.assertRaises(base64utils.InvalidBase64Error,
- base64utils.base64url_percent_encode, 'chars')
- self.assertRaises(base64utils.InvalidBase64Error,
- base64utils.base64url_percent_decode, 'AB%3D%3')
-
-
-class TestTextWrap(unit.BaseTestCase):
-
- def test_wrapping(self):
- raw_text = 'abcdefgh'
- wrapped_text = 'abc\ndef\ngh\n'
-
- self.assertEqual(wrapped_text,
- base64utils.base64_wrap(raw_text, width=3))
-
- t = '\n'.join(base64utils.base64_wrap_iter(raw_text, width=3)) + '\n'
- self.assertEqual(wrapped_text, t)
-
- raw_text = 'abcdefgh'
- wrapped_text = 'abcd\nefgh\n'
-
- self.assertEqual(wrapped_text,
- base64utils.base64_wrap(raw_text, width=4))
diff --git a/keystone-moon/keystone/tests/unit/common/test_connection_pool.py b/keystone-moon/keystone/tests/unit/common/test_connection_pool.py
deleted file mode 100644
index 3813e033..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_connection_pool.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import threading
-import time
-
-import mock
-import six
-from six.moves import queue
-import testtools
-from testtools import matchers
-
-from keystone.common.cache import _memcache_pool
-from keystone import exception
-from keystone.tests.unit import core
-
-
-class _TestConnectionPool(_memcache_pool.ConnectionPool):
- destroyed_value = 'destroyed'
-
- def _create_connection(self):
- return mock.MagicMock()
-
- def _destroy_connection(self, conn):
- conn(self.destroyed_value)
-
-
-class TestConnectionPool(core.TestCase):
- def setUp(self):
- super(TestConnectionPool, self).setUp()
- self.unused_timeout = 10
- self.maxsize = 2
- self.connection_pool = _TestConnectionPool(
- maxsize=self.maxsize,
- unused_timeout=self.unused_timeout)
- self.addCleanup(self.cleanup_instance('connection_pool'))
-
- def test_get_context_manager(self):
- self.assertThat(self.connection_pool.queue, matchers.HasLength(0))
- with self.connection_pool.acquire() as conn:
- self.assertEqual(1, self.connection_pool._acquired)
- self.assertEqual(0, self.connection_pool._acquired)
- self.assertThat(self.connection_pool.queue, matchers.HasLength(1))
- self.assertEqual(conn, self.connection_pool.queue[0].connection)
-
- def test_cleanup_pool(self):
- self.test_get_context_manager()
- newtime = time.time() + self.unused_timeout * 2
- non_expired_connection = _memcache_pool._PoolItem(
- ttl=(newtime * 2),
- connection=mock.MagicMock())
- self.connection_pool.queue.append(non_expired_connection)
- self.assertThat(self.connection_pool.queue, matchers.HasLength(2))
- with mock.patch.object(time, 'time', return_value=newtime):
- conn = self.connection_pool.queue[0].connection
- with self.connection_pool.acquire():
- pass
- conn.assert_has_calls(
- [mock.call(self.connection_pool.destroyed_value)])
- self.assertThat(self.connection_pool.queue, matchers.HasLength(1))
- self.assertEqual(0, non_expired_connection.connection.call_count)
-
- def test_acquire_conn_exception_returns_acquired_count(self):
- class TestException(Exception):
- pass
-
- with mock.patch.object(_TestConnectionPool, '_create_connection',
- side_effect=TestException):
- with testtools.ExpectedException(TestException):
- with self.connection_pool.acquire():
- pass
- self.assertThat(self.connection_pool.queue,
- matchers.HasLength(0))
- self.assertEqual(0, self.connection_pool._acquired)
-
- def test_connection_pool_limits_maximum_connections(self):
- # NOTE(morganfainberg): To ensure we don't lockup tests until the
- # job limit, explicitly call .get_nowait() and .put_nowait() in this
- # case.
- conn1 = self.connection_pool.get_nowait()
- conn2 = self.connection_pool.get_nowait()
-
- # Use a nowait version to raise an Empty exception indicating we would
- # not get another connection until one is placed back into the queue.
- self.assertRaises(queue.Empty, self.connection_pool.get_nowait)
-
- # Place the connections back into the pool.
- self.connection_pool.put_nowait(conn1)
- self.connection_pool.put_nowait(conn2)
-
- # Make sure we can get a connection out of the pool again.
- self.connection_pool.get_nowait()
-
- def test_connection_pool_maximum_connection_get_timeout(self):
- connection_pool = _TestConnectionPool(
- maxsize=1,
- unused_timeout=self.unused_timeout,
- conn_get_timeout=0)
-
- def _acquire_connection():
- with connection_pool.acquire():
- pass
-
- # Make sure we've consumed the only available connection from the pool
- conn = connection_pool.get_nowait()
-
- self.assertRaises(exception.UnexpectedError, _acquire_connection)
-
- # Put the connection back and ensure we can acquire the connection
- # after it is available.
- connection_pool.put_nowait(conn)
- _acquire_connection()
-
-
-class TestMemcacheClientOverrides(core.BaseTestCase):
-
- def test_client_stripped_of_threading_local(self):
- """threading.local overrides are restored for _MemcacheClient"""
- client_class = _memcache_pool._MemcacheClient
- # get the genuine thread._local from MRO
- thread_local = client_class.__mro__[2]
- self.assertTrue(thread_local is threading.local)
- for field in six.iterkeys(thread_local.__dict__):
- if field not in ('__dict__', '__weakref__'):
- self.assertNotEqual(id(getattr(thread_local, field, None)),
- id(getattr(client_class, field, None)))
diff --git a/keystone-moon/keystone/tests/unit/common/test_injection.py b/keystone-moon/keystone/tests/unit/common/test_injection.py
deleted file mode 100644
index 9a5d1e7d..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_injection.py
+++ /dev/null
@@ -1,238 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from keystone.common import dependency
-from keystone.tests import unit
-
-
-class TestDependencyInjection(unit.BaseTestCase):
- def setUp(self):
- super(TestDependencyInjection, self).setUp()
- dependency.reset()
- self.addCleanup(dependency.reset)
-
- def test_dependency_injection(self):
- class Interface(object):
- def do_work(self):
- assert False
-
- @dependency.provider('first_api')
- class FirstImplementation(Interface):
- def do_work(self):
- return True
-
- @dependency.provider('second_api')
- class SecondImplementation(Interface):
- def do_work(self):
- return True
-
- @dependency.requires('first_api', 'second_api')
- class Consumer(object):
- def do_work_with_dependencies(self):
- assert self.first_api.do_work()
- assert self.second_api.do_work()
-
- # initialize dependency providers
- first_api = FirstImplementation()
- second_api = SecondImplementation()
-
- # ... sometime later, initialize a dependency consumer
- consumer = Consumer()
-
- # the expected dependencies should be available to the consumer
- self.assertIs(consumer.first_api, first_api)
- self.assertIs(consumer.second_api, second_api)
- self.assertIsInstance(consumer.first_api, Interface)
- self.assertIsInstance(consumer.second_api, Interface)
- consumer.do_work_with_dependencies()
-
- def test_dependency_provider_configuration(self):
- @dependency.provider('api')
- class Configurable(object):
- def __init__(self, value=None):
- self.value = value
-
- def get_value(self):
- return self.value
-
- @dependency.requires('api')
- class Consumer(object):
- def get_value(self):
- return self.api.get_value()
-
- # initialize dependency providers
- api = Configurable(value=True)
-
- # ... sometime later, initialize a dependency consumer
- consumer = Consumer()
-
- # the expected dependencies should be available to the consumer
- self.assertIs(consumer.api, api)
- self.assertIsInstance(consumer.api, Configurable)
- self.assertTrue(consumer.get_value())
-
- def test_dependency_consumer_configuration(self):
- @dependency.provider('api')
- class Provider(object):
- def get_value(self):
- return True
-
- @dependency.requires('api')
- class Configurable(object):
- def __init__(self, value=None):
- self.value = value
-
- def get_value(self):
- if self.value:
- return self.api.get_value()
-
- # initialize dependency providers
- api = Provider()
-
- # ... sometime later, initialize a dependency consumer
- consumer = Configurable(value=True)
-
- # the expected dependencies should be available to the consumer
- self.assertIs(consumer.api, api)
- self.assertIsInstance(consumer.api, Provider)
- self.assertTrue(consumer.get_value())
-
- def test_inherited_dependency(self):
- class Interface(object):
- def do_work(self):
- assert False
-
- @dependency.provider('first_api')
- class FirstImplementation(Interface):
- def do_work(self):
- return True
-
- @dependency.provider('second_api')
- class SecondImplementation(Interface):
- def do_work(self):
- return True
-
- @dependency.requires('first_api')
- class ParentConsumer(object):
- def do_work_with_dependencies(self):
- assert self.first_api.do_work()
-
- @dependency.requires('second_api')
- class ChildConsumer(ParentConsumer):
- def do_work_with_dependencies(self):
- assert self.second_api.do_work()
- super(ChildConsumer, self).do_work_with_dependencies()
-
- # initialize dependency providers
- first_api = FirstImplementation()
- second_api = SecondImplementation()
-
- # ... sometime later, initialize a dependency consumer
- consumer = ChildConsumer()
-
- # dependencies should be naturally inherited
- self.assertEqual(
- set(['first_api']),
- ParentConsumer._dependencies)
- self.assertEqual(
- set(['first_api', 'second_api']),
- ChildConsumer._dependencies)
- self.assertEqual(
- set(['first_api', 'second_api']),
- consumer._dependencies)
-
- # the expected dependencies should be available to the consumer
- self.assertIs(consumer.first_api, first_api)
- self.assertIs(consumer.second_api, second_api)
- self.assertIsInstance(consumer.first_api, Interface)
- self.assertIsInstance(consumer.second_api, Interface)
- consumer.do_work_with_dependencies()
-
- def test_unresolvable_dependency(self):
- @dependency.requires(uuid.uuid4().hex)
- class Consumer(object):
- pass
-
- def for_test():
- Consumer()
- dependency.resolve_future_dependencies()
-
- self.assertRaises(dependency.UnresolvableDependencyException, for_test)
-
- def test_circular_dependency(self):
- p1_name = uuid.uuid4().hex
- p2_name = uuid.uuid4().hex
-
- @dependency.provider(p1_name)
- @dependency.requires(p2_name)
- class P1(object):
- pass
-
- @dependency.provider(p2_name)
- @dependency.requires(p1_name)
- class P2(object):
- pass
-
- p1 = P1()
- p2 = P2()
-
- dependency.resolve_future_dependencies()
-
- self.assertIs(getattr(p1, p2_name), p2)
- self.assertIs(getattr(p2, p1_name), p1)
-
- def test_reset(self):
- # Can reset the registry of providers.
-
- p_id = uuid.uuid4().hex
-
- @dependency.provider(p_id)
- class P(object):
- pass
-
- p_inst = P()
-
- self.assertIs(dependency.get_provider(p_id), p_inst)
-
- dependency.reset()
-
- self.assertFalse(dependency._REGISTRY)
-
- def test_get_provider(self):
- # Can get the instance of a provider using get_provider
-
- provider_name = uuid.uuid4().hex
-
- @dependency.provider(provider_name)
- class P(object):
- pass
-
- provider_instance = P()
- retrieved_provider_instance = dependency.get_provider(provider_name)
- self.assertIs(provider_instance, retrieved_provider_instance)
-
- def test_get_provider_not_provided_error(self):
- # If no provider and provider is required then fails.
-
- provider_name = uuid.uuid4().hex
- self.assertRaises(KeyError, dependency.get_provider, provider_name)
-
- def test_get_provider_not_provided_optional(self):
- # If no provider and provider is optional then returns None.
-
- provider_name = uuid.uuid4().hex
- self.assertIsNone(dependency.get_provider(provider_name,
- dependency.GET_OPTIONAL))
diff --git a/keystone-moon/keystone/tests/unit/common/test_json_home.py b/keystone-moon/keystone/tests/unit/common/test_json_home.py
deleted file mode 100644
index 94e2d138..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_json_home.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright 2014 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import copy
-
-from testtools import matchers
-
-from keystone.common import json_home
-from keystone.tests import unit
-
-
-class JsonHomeTest(unit.BaseTestCase):
- def test_build_v3_resource_relation(self):
- resource_name = self.getUniqueString()
- relation = json_home.build_v3_resource_relation(resource_name)
- exp_relation = (
- 'http://docs.openstack.org/api/openstack-identity/3/rel/%s' %
- resource_name)
- self.assertThat(relation, matchers.Equals(exp_relation))
-
- def test_build_v3_extension_resource_relation(self):
- extension_name = self.getUniqueString()
- extension_version = self.getUniqueString()
- resource_name = self.getUniqueString()
- relation = json_home.build_v3_extension_resource_relation(
- extension_name, extension_version, resource_name)
- exp_relation = (
- 'http://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/rel/'
- '%s' % (extension_name, extension_version, resource_name))
- self.assertThat(relation, matchers.Equals(exp_relation))
-
- def test_build_v3_parameter_relation(self):
- parameter_name = self.getUniqueString()
- relation = json_home.build_v3_parameter_relation(parameter_name)
- exp_relation = (
- 'http://docs.openstack.org/api/openstack-identity/3/param/%s' %
- parameter_name)
- self.assertThat(relation, matchers.Equals(exp_relation))
-
- def test_build_v3_extension_parameter_relation(self):
- extension_name = self.getUniqueString()
- extension_version = self.getUniqueString()
- parameter_name = self.getUniqueString()
- relation = json_home.build_v3_extension_parameter_relation(
- extension_name, extension_version, parameter_name)
- exp_relation = (
- 'http://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/'
- 'param/%s' % (extension_name, extension_version, parameter_name))
- self.assertThat(relation, matchers.Equals(exp_relation))
-
- def test_translate_urls(self):
- href_rel = self.getUniqueString()
- href = self.getUniqueString()
- href_template_rel = self.getUniqueString()
- href_template = self.getUniqueString()
- href_vars = {self.getUniqueString(): self.getUniqueString()}
- original_json_home = {
- 'resources': {
- href_rel: {'href': href},
- href_template_rel: {
- 'href-template': href_template,
- 'href-vars': href_vars}
- }
- }
-
- new_json_home = copy.deepcopy(original_json_home)
- new_prefix = self.getUniqueString()
- json_home.translate_urls(new_json_home, new_prefix)
-
- exp_json_home = {
- 'resources': {
- href_rel: {'href': new_prefix + href},
- href_template_rel: {
- 'href-template': new_prefix + href_template,
- 'href-vars': href_vars}
- }
- }
-
- self.assertThat(new_json_home, matchers.Equals(exp_json_home))
diff --git a/keystone-moon/keystone/tests/unit/common/test_ldap.py b/keystone-moon/keystone/tests/unit/common/test_ldap.py
deleted file mode 100644
index eed77286..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_ldap.py
+++ /dev/null
@@ -1,584 +0,0 @@
-# -*- coding: utf-8 -*-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import os
-import tempfile
-import uuid
-
-import fixtures
-import ldap.dn
-import mock
-from oslo_config import cfg
-from testtools import matchers
-
-from keystone.common import driver_hints
-from keystone.common import ldap as ks_ldap
-from keystone.common.ldap import core as common_ldap_core
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit import fakeldap
-from keystone.tests.unit.ksfixtures import database
-
-
-CONF = cfg.CONF
-
-
-class DnCompareTest(unit.BaseTestCase):
- """Tests for the DN comparison functions in keystone.common.ldap.core."""
-
- def test_prep(self):
- # prep_case_insensitive returns the string with spaces at the front and
- # end if it's already lowercase and no insignificant characters.
- value = 'lowercase value'
- self.assertEqual(value, ks_ldap.prep_case_insensitive(value))
-
- def test_prep_lowercase(self):
- # prep_case_insensitive returns the string with spaces at the front and
- # end and lowercases the value.
- value = 'UPPERCASE VALUE'
- exp_value = value.lower()
- self.assertEqual(exp_value, ks_ldap.prep_case_insensitive(value))
-
- def test_prep_insignificant(self):
- # prep_case_insensitive remove insignificant spaces.
- value = 'before after'
- exp_value = 'before after'
- self.assertEqual(exp_value, ks_ldap.prep_case_insensitive(value))
-
- def test_prep_insignificant_pre_post(self):
- # prep_case_insensitive remove insignificant spaces.
- value = ' value '
- exp_value = 'value'
- self.assertEqual(exp_value, ks_ldap.prep_case_insensitive(value))
-
- def test_ava_equal_same(self):
- # is_ava_value_equal returns True if the two values are the same.
- value = 'val1'
- self.assertTrue(ks_ldap.is_ava_value_equal('cn', value, value))
-
- def test_ava_equal_complex(self):
- # is_ava_value_equal returns True if the two values are the same using
- # a value that's got different capitalization and insignificant chars.
- val1 = 'before after'
- val2 = ' BEFORE afTer '
- self.assertTrue(ks_ldap.is_ava_value_equal('cn', val1, val2))
-
- def test_ava_different(self):
- # is_ava_value_equal returns False if the values aren't the same.
- self.assertFalse(ks_ldap.is_ava_value_equal('cn', 'val1', 'val2'))
-
- def test_rdn_same(self):
- # is_rdn_equal returns True if the two values are the same.
- rdn = ldap.dn.str2dn('cn=val1')[0]
- self.assertTrue(ks_ldap.is_rdn_equal(rdn, rdn))
-
- def test_rdn_diff_length(self):
- # is_rdn_equal returns False if the RDNs have a different number of
- # AVAs.
- rdn1 = ldap.dn.str2dn('cn=cn1')[0]
- rdn2 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0]
- self.assertFalse(ks_ldap.is_rdn_equal(rdn1, rdn2))
-
- def test_rdn_multi_ava_same_order(self):
- # is_rdn_equal returns True if the RDNs have the same number of AVAs
- # and the values are the same.
- rdn1 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0]
- rdn2 = ldap.dn.str2dn('cn=CN1+ou=OU1')[0]
- self.assertTrue(ks_ldap.is_rdn_equal(rdn1, rdn2))
-
- def test_rdn_multi_ava_diff_order(self):
- # is_rdn_equal returns True if the RDNs have the same number of AVAs
- # and the values are the same, even if in a different order
- rdn1 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0]
- rdn2 = ldap.dn.str2dn('ou=OU1+cn=CN1')[0]
- self.assertTrue(ks_ldap.is_rdn_equal(rdn1, rdn2))
-
- def test_rdn_multi_ava_diff_type(self):
- # is_rdn_equal returns False if the RDNs have the same number of AVAs
- # and the attribute types are different.
- rdn1 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0]
- rdn2 = ldap.dn.str2dn('cn=cn1+sn=sn1')[0]
- self.assertFalse(ks_ldap.is_rdn_equal(rdn1, rdn2))
-
- def test_rdn_attr_type_case_diff(self):
- # is_rdn_equal returns True for same RDNs even when attr type case is
- # different.
- rdn1 = ldap.dn.str2dn('cn=cn1')[0]
- rdn2 = ldap.dn.str2dn('CN=cn1')[0]
- self.assertTrue(ks_ldap.is_rdn_equal(rdn1, rdn2))
-
- def test_rdn_attr_type_alias(self):
- # is_rdn_equal returns False for same RDNs even when attr type alias is
- # used. Note that this is a limitation since an LDAP server should
- # consider them equal.
- rdn1 = ldap.dn.str2dn('cn=cn1')[0]
- rdn2 = ldap.dn.str2dn('2.5.4.3=cn1')[0]
- self.assertFalse(ks_ldap.is_rdn_equal(rdn1, rdn2))
-
- def test_dn_same(self):
- # is_dn_equal returns True if the DNs are the same.
- dn = 'cn=Babs Jansen,ou=OpenStack'
- self.assertTrue(ks_ldap.is_dn_equal(dn, dn))
-
- def test_dn_equal_unicode(self):
- # is_dn_equal can accept unicode
- dn = u'cn=fäké,ou=OpenStack'
- self.assertTrue(ks_ldap.is_dn_equal(dn, dn))
-
- def test_dn_diff_length(self):
- # is_dn_equal returns False if the DNs don't have the same number of
- # RDNs
- dn1 = 'cn=Babs Jansen,ou=OpenStack'
- dn2 = 'cn=Babs Jansen,ou=OpenStack,dc=example.com'
- self.assertFalse(ks_ldap.is_dn_equal(dn1, dn2))
-
- def test_dn_equal_rdns(self):
- # is_dn_equal returns True if the DNs have the same number of RDNs
- # and each RDN is the same.
- dn1 = 'cn=Babs Jansen,ou=OpenStack+cn=OpenSource'
- dn2 = 'CN=Babs Jansen,cn=OpenSource+ou=OpenStack'
- self.assertTrue(ks_ldap.is_dn_equal(dn1, dn2))
-
- def test_dn_parsed_dns(self):
- # is_dn_equal can also accept parsed DNs.
- dn_str1 = ldap.dn.str2dn('cn=Babs Jansen,ou=OpenStack+cn=OpenSource')
- dn_str2 = ldap.dn.str2dn('CN=Babs Jansen,cn=OpenSource+ou=OpenStack')
- self.assertTrue(ks_ldap.is_dn_equal(dn_str1, dn_str2))
-
- def test_startswith_under_child(self):
- # dn_startswith returns True if descendant_dn is a child of dn.
- child = 'cn=Babs Jansen,ou=OpenStack'
- parent = 'ou=OpenStack'
- self.assertTrue(ks_ldap.dn_startswith(child, parent))
-
- def test_startswith_parent(self):
- # dn_startswith returns False if descendant_dn is a parent of dn.
- child = 'cn=Babs Jansen,ou=OpenStack'
- parent = 'ou=OpenStack'
- self.assertFalse(ks_ldap.dn_startswith(parent, child))
-
- def test_startswith_same(self):
- # dn_startswith returns False if DNs are the same.
- dn = 'cn=Babs Jansen,ou=OpenStack'
- self.assertFalse(ks_ldap.dn_startswith(dn, dn))
-
- def test_startswith_not_parent(self):
- # dn_startswith returns False if descendant_dn is not under the dn
- child = 'cn=Babs Jansen,ou=OpenStack'
- parent = 'dc=example.com'
- self.assertFalse(ks_ldap.dn_startswith(child, parent))
-
- def test_startswith_descendant(self):
- # dn_startswith returns True if descendant_dn is a descendant of dn.
- descendant = 'cn=Babs Jansen,ou=Keystone,ou=OpenStack,dc=example.com'
- dn = 'ou=OpenStack,dc=example.com'
- self.assertTrue(ks_ldap.dn_startswith(descendant, dn))
-
- descendant = 'uid=12345,ou=Users,dc=example,dc=com'
- dn = 'ou=Users,dc=example,dc=com'
- self.assertTrue(ks_ldap.dn_startswith(descendant, dn))
-
- def test_startswith_parsed_dns(self):
- # dn_startswith also accepts parsed DNs.
- descendant = ldap.dn.str2dn('cn=Babs Jansen,ou=OpenStack')
- dn = ldap.dn.str2dn('ou=OpenStack')
- self.assertTrue(ks_ldap.dn_startswith(descendant, dn))
-
- def test_startswith_unicode(self):
- # dn_startswith accepts unicode.
- child = u'cn=fäké,ou=OpenStäck'
- parent = u'ou=OpenStäck'
- self.assertTrue(ks_ldap.dn_startswith(child, parent))
-
-
-class LDAPDeleteTreeTest(unit.TestCase):
-
- def setUp(self):
- super(LDAPDeleteTreeTest, self).setUp()
-
- ks_ldap.register_handler('fake://',
- fakeldap.FakeLdapNoSubtreeDelete)
- self.useFixture(database.Database(self.sql_driver_version_overrides))
-
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- self.addCleanup(self.clear_database)
- self.addCleanup(common_ldap_core._HANDLERS.clear)
-
- def clear_database(self):
- for shelf in fakeldap.FakeShelves:
- fakeldap.FakeShelves[shelf].clear()
-
- def config_overrides(self):
- super(LDAPDeleteTreeTest, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
-
- def config_files(self):
- config_files = super(LDAPDeleteTreeTest, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
- return config_files
-
- def test_delete_tree(self):
- """Test manually deleting a tree.
-
- Few LDAP servers support CONTROL_DELETETREE. This test
- exercises the alternate code paths in BaseLdap.delete_tree.
-
- """
- conn = self.identity_api.user.get_connection()
- id_attr = self.identity_api.user.id_attr
- objclass = self.identity_api.user.object_class.lower()
- tree_dn = self.identity_api.user.tree_dn
-
- def create_entry(name, parent_dn=None):
- if not parent_dn:
- parent_dn = tree_dn
- dn = '%s=%s,%s' % (id_attr, name, parent_dn)
- attrs = [('objectclass', [objclass, 'ldapsubentry']),
- (id_attr, [name])]
- conn.add_s(dn, attrs)
- return dn
-
- # create 3 entries like this:
- # cn=base
- # cn=child,cn=base
- # cn=grandchild,cn=child,cn=base
- # then attempt to delete_tree(cn=base)
- base_id = 'base'
- base_dn = create_entry(base_id)
- child_dn = create_entry('child', base_dn)
- grandchild_dn = create_entry('grandchild', child_dn)
-
- # verify that the three entries were created
- scope = ldap.SCOPE_SUBTREE
- filt = '(|(objectclass=*)(objectclass=ldapsubentry))'
- entries = conn.search_s(base_dn, scope, filt,
- attrlist=common_ldap_core.DN_ONLY)
- self.assertThat(entries, matchers.HasLength(3))
- sort_ents = sorted([e[0] for e in entries], key=len, reverse=True)
- self.assertEqual([grandchild_dn, child_dn, base_dn], sort_ents)
-
- # verify that a non-leaf node can't be deleted directly by the
- # LDAP server
- self.assertRaises(ldap.NOT_ALLOWED_ON_NONLEAF,
- conn.delete_s, base_dn)
- self.assertRaises(ldap.NOT_ALLOWED_ON_NONLEAF,
- conn.delete_s, child_dn)
-
- # call our delete_tree implementation
- self.identity_api.user.delete_tree(base_id)
- self.assertRaises(ldap.NO_SUCH_OBJECT,
- conn.search_s, base_dn, ldap.SCOPE_BASE)
- self.assertRaises(ldap.NO_SUCH_OBJECT,
- conn.search_s, child_dn, ldap.SCOPE_BASE)
- self.assertRaises(ldap.NO_SUCH_OBJECT,
- conn.search_s, grandchild_dn, ldap.SCOPE_BASE)
-
-
-class MultiURLTests(unit.TestCase):
- """Tests for setting multiple LDAP URLs."""
-
- def test_multiple_urls_with_comma_no_conn_pool(self):
- urls = 'ldap://localhost,ldap://backup.localhost'
- self.config_fixture.config(group='ldap', url=urls, use_pool=False)
- base_ldap = ks_ldap.BaseLdap(CONF)
- ldap_connection = base_ldap.get_connection()
- self.assertEqual(urls, ldap_connection.conn.conn._uri)
-
- def test_multiple_urls_with_comma_with_conn_pool(self):
- urls = 'ldap://localhost,ldap://backup.localhost'
- self.config_fixture.config(group='ldap', url=urls, use_pool=True)
- base_ldap = ks_ldap.BaseLdap(CONF)
- ldap_connection = base_ldap.get_connection()
- self.assertEqual(urls, ldap_connection.conn.conn_pool.uri)
-
-
-class SslTlsTest(unit.TestCase):
- """Tests for the SSL/TLS functionality in keystone.common.ldap.core."""
-
- @mock.patch.object(ks_ldap.core.KeystoneLDAPHandler, 'simple_bind_s')
- @mock.patch.object(ldap.ldapobject.LDAPObject, 'start_tls_s')
- def _init_ldap_connection(self, config, mock_ldap_one, mock_ldap_two):
- # Attempt to connect to initialize python-ldap.
- base_ldap = ks_ldap.BaseLdap(config)
- base_ldap.get_connection()
-
- def test_certfile_trust_tls(self):
- # We need this to actually exist, so we create a tempfile.
- (handle, certfile) = tempfile.mkstemp()
- self.addCleanup(os.unlink, certfile)
- self.addCleanup(os.close, handle)
- self.config_fixture.config(group='ldap',
- url='ldap://localhost',
- use_tls=True,
- tls_cacertfile=certfile)
-
- self._init_ldap_connection(CONF)
-
- # Ensure the cert trust option is set.
- self.assertEqual(certfile, ldap.get_option(ldap.OPT_X_TLS_CACERTFILE))
-
- def test_certdir_trust_tls(self):
- # We need this to actually exist, so we create a tempdir.
- certdir = self.useFixture(fixtures.TempDir()).path
- self.config_fixture.config(group='ldap',
- url='ldap://localhost',
- use_tls=True,
- tls_cacertdir=certdir)
-
- self._init_ldap_connection(CONF)
-
- # Ensure the cert trust option is set.
- self.assertEqual(certdir, ldap.get_option(ldap.OPT_X_TLS_CACERTDIR))
-
- def test_certfile_trust_ldaps(self):
- # We need this to actually exist, so we create a tempfile.
- (handle, certfile) = tempfile.mkstemp()
- self.addCleanup(os.unlink, certfile)
- self.addCleanup(os.close, handle)
- self.config_fixture.config(group='ldap',
- url='ldaps://localhost',
- use_tls=False,
- tls_cacertfile=certfile)
-
- self._init_ldap_connection(CONF)
-
- # Ensure the cert trust option is set.
- self.assertEqual(certfile, ldap.get_option(ldap.OPT_X_TLS_CACERTFILE))
-
- def test_certdir_trust_ldaps(self):
- # We need this to actually exist, so we create a tempdir.
- certdir = self.useFixture(fixtures.TempDir()).path
- self.config_fixture.config(group='ldap',
- url='ldaps://localhost',
- use_tls=False,
- tls_cacertdir=certdir)
-
- self._init_ldap_connection(CONF)
-
- # Ensure the cert trust option is set.
- self.assertEqual(certdir, ldap.get_option(ldap.OPT_X_TLS_CACERTDIR))
-
-
-class LDAPPagedResultsTest(unit.TestCase):
- """Tests the paged results functionality in keystone.common.ldap.core."""
-
- def setUp(self):
- super(LDAPPagedResultsTest, self).setUp()
- self.clear_database()
-
- ks_ldap.register_handler('fake://', fakeldap.FakeLdap)
- self.addCleanup(common_ldap_core._HANDLERS.clear)
- self.useFixture(database.Database(self.sql_driver_version_overrides))
-
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- def clear_database(self):
- for shelf in fakeldap.FakeShelves:
- fakeldap.FakeShelves[shelf].clear()
-
- def config_overrides(self):
- super(LDAPPagedResultsTest, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
-
- def config_files(self):
- config_files = super(LDAPPagedResultsTest, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
- return config_files
-
- @mock.patch.object(fakeldap.FakeLdap, 'search_ext')
- @mock.patch.object(fakeldap.FakeLdap, 'result3')
- def test_paged_results_control_api(self, mock_result3, mock_search_ext):
- mock_result3.return_value = ('', [], 1, [])
-
- self.config_fixture.config(group='ldap',
- page_size=1)
-
- conn = self.identity_api.user.get_connection()
- conn._paged_search_s('dc=example,dc=test',
- ldap.SCOPE_SUBTREE,
- 'objectclass=*')
-
-
-class CommonLdapTestCase(unit.BaseTestCase):
- """These test cases call functions in keystone.common.ldap."""
-
- def test_binary_attribute_values(self):
- result = [(
- 'cn=junk,dc=example,dc=com',
- {
- 'cn': ['junk'],
- 'sn': [uuid.uuid4().hex],
- 'mail': [uuid.uuid4().hex],
- 'binary_attr': ['\x00\xFF\x00\xFF']
- }
- ), ]
- py_result = ks_ldap.convert_ldap_result(result)
- # The attribute containing the binary value should
- # not be present in the converted result.
- self.assertNotIn('binary_attr', py_result[0][1])
-
- def test_utf8_conversion(self):
- value_unicode = u'fäké1'
- value_utf8 = value_unicode.encode('utf-8')
-
- result_utf8 = ks_ldap.utf8_encode(value_unicode)
- self.assertEqual(value_utf8, result_utf8)
-
- result_utf8 = ks_ldap.utf8_encode(value_utf8)
- self.assertEqual(value_utf8, result_utf8)
-
- result_unicode = ks_ldap.utf8_decode(value_utf8)
- self.assertEqual(value_unicode, result_unicode)
-
- result_unicode = ks_ldap.utf8_decode(value_unicode)
- self.assertEqual(value_unicode, result_unicode)
-
- self.assertRaises(TypeError,
- ks_ldap.utf8_encode,
- 100)
-
- result_unicode = ks_ldap.utf8_decode(100)
- self.assertEqual(u'100', result_unicode)
-
- def test_user_id_begins_with_0(self):
- user_id = '0123456'
- result = [(
- 'cn=dummy,dc=example,dc=com',
- {
- 'user_id': [user_id],
- 'enabled': ['TRUE']
- }
- ), ]
- py_result = ks_ldap.convert_ldap_result(result)
- # The user id should be 0123456, and the enabled
- # flag should be True
- self.assertIs(py_result[0][1]['enabled'][0], True)
- self.assertEqual(user_id, py_result[0][1]['user_id'][0])
-
- def test_user_id_begins_with_0_and_enabled_bit_mask(self):
- user_id = '0123456'
- bitmask = '225'
- expected_bitmask = 225
- result = [(
- 'cn=dummy,dc=example,dc=com',
- {
- 'user_id': [user_id],
- 'enabled': [bitmask]
- }
- ), ]
- py_result = ks_ldap.convert_ldap_result(result)
- # The user id should be 0123456, and the enabled
- # flag should be 225
- self.assertEqual(expected_bitmask, py_result[0][1]['enabled'][0])
- self.assertEqual(user_id, py_result[0][1]['user_id'][0])
-
- def test_user_id_and_bitmask_begins_with_0(self):
- user_id = '0123456'
- bitmask = '0225'
- expected_bitmask = 225
- result = [(
- 'cn=dummy,dc=example,dc=com',
- {
- 'user_id': [user_id],
- 'enabled': [bitmask]
- }
- ), ]
- py_result = ks_ldap.convert_ldap_result(result)
- # The user id should be 0123456, and the enabled
- # flag should be 225, the 0 is dropped.
- self.assertEqual(expected_bitmask, py_result[0][1]['enabled'][0])
- self.assertEqual(user_id, py_result[0][1]['user_id'][0])
-
- def test_user_id_and_user_name_with_boolean_string(self):
- boolean_strings = ['TRUE', 'FALSE', 'true', 'false', 'True', 'False',
- 'TrUe' 'FaLse']
- for user_name in boolean_strings:
- user_id = uuid.uuid4().hex
- result = [(
- 'cn=dummy,dc=example,dc=com',
- {
- 'user_id': [user_id],
- 'user_name': [user_name]
- }
- ), ]
- py_result = ks_ldap.convert_ldap_result(result)
- # The user name should still be a string value.
- self.assertEqual(user_name, py_result[0][1]['user_name'][0])
-
-
-class LDAPFilterQueryCompositionTest(unit.TestCase):
- """These test cases test LDAP filter generation."""
-
- def setUp(self):
- super(LDAPFilterQueryCompositionTest, self).setUp()
-
- self.base_ldap = ks_ldap.BaseLdap(self.config_fixture.conf)
-
- # The tests need an attribute mapping to use.
- self.attribute_name = uuid.uuid4().hex
- self.filter_attribute_name = uuid.uuid4().hex
- self.base_ldap.attribute_mapping = {
- self.attribute_name: self.filter_attribute_name
- }
-
- def test_return_query_with_no_hints(self):
- hints = driver_hints.Hints()
- # NOTE: doesn't have to be a real query, we just need to make sure the
- # same string is returned if there are no hints.
- query = uuid.uuid4().hex
- self.assertEqual(query,
- self.base_ldap.filter_query(hints=hints, query=query))
-
- # make sure the default query is an empty string
- self.assertEqual('', self.base_ldap.filter_query(hints=hints))
-
- def test_filter_with_empty_query_and_hints_set(self):
- hints = driver_hints.Hints()
- username = uuid.uuid4().hex
- hints.add_filter(name=self.attribute_name,
- value=username,
- comparator='equals',
- case_sensitive=False)
- expected_ldap_filter = '(&(%s=%s))' % (
- self.filter_attribute_name, username)
- self.assertEqual(expected_ldap_filter,
- self.base_ldap.filter_query(hints=hints))
-
- def test_filter_with_both_query_and_hints_set(self):
- hints = driver_hints.Hints()
- # NOTE: doesn't have to be a real query, we just need to make sure the
- # filter string is concatenated correctly
- query = uuid.uuid4().hex
- username = uuid.uuid4().hex
- expected_result = '(&%(query)s(%(user_name_attr)s=%(username)s))' % (
- {'query': query,
- 'user_name_attr': self.filter_attribute_name,
- 'username': username})
- hints.add_filter(self.attribute_name, username)
- self.assertEqual(expected_result,
- self.base_ldap.filter_query(hints=hints, query=query))
-
- def test_filter_with_hints_and_query_is_none(self):
- hints = driver_hints.Hints()
- username = uuid.uuid4().hex
- hints.add_filter(name=self.attribute_name,
- value=username,
- comparator='equals',
- case_sensitive=False)
- expected_ldap_filter = '(&(%s=%s))' % (
- self.filter_attribute_name, username)
- self.assertEqual(expected_ldap_filter,
- self.base_ldap.filter_query(hints=hints, query=None))
diff --git a/keystone-moon/keystone/tests/unit/common/test_manager.py b/keystone-moon/keystone/tests/unit/common/test_manager.py
deleted file mode 100644
index 7ef91e15..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_manager.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import mock
-
-from keystone import catalog
-from keystone.common import manager
-from keystone.tests import unit
-
-
-class TestCreateLegacyDriver(unit.BaseTestCase):
-
- @mock.patch('oslo_log.versionutils.report_deprecated_feature')
- def test_class_is_properly_deprecated(self, mock_reporter):
- Driver = manager.create_legacy_driver(catalog.CatalogDriverV8)
-
- # NOTE(dstanek): I want to subvert the requirement for this
- # class to implement all of the abstract methods.
- Driver.__abstractmethods__ = set()
- impl = Driver()
-
- details = {
- 'as_of': 'Liberty',
- 'what': 'keystone.catalog.core.Driver',
- 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8',
- 'remove_in': mock.ANY,
- }
- mock_reporter.assert_called_with(mock.ANY, mock.ANY, details)
- self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0])
-
- self.assertIsInstance(impl, catalog.CatalogDriverV8)
diff --git a/keystone-moon/keystone/tests/unit/common/test_notifications.py b/keystone-moon/keystone/tests/unit/common/test_notifications.py
deleted file mode 100644
index aa2e6f72..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_notifications.py
+++ /dev/null
@@ -1,1248 +0,0 @@
-# Copyright 2013 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-import uuid
-
-import mock
-from oslo_config import cfg
-from oslo_config import fixture as config_fixture
-from oslotest import mockpatch
-from pycadf import cadftaxonomy
-from pycadf import cadftype
-from pycadf import eventfactory
-from pycadf import resource as cadfresource
-
-from keystone import notifications
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-
-
-CONF = cfg.CONF
-
-EXP_RESOURCE_TYPE = uuid.uuid4().hex
-CREATED_OPERATION = notifications.ACTIONS.created
-UPDATED_OPERATION = notifications.ACTIONS.updated
-DELETED_OPERATION = notifications.ACTIONS.deleted
-DISABLED_OPERATION = notifications.ACTIONS.disabled
-
-
-class ArbitraryException(Exception):
- pass
-
-
-def register_callback(operation, resource_type=EXP_RESOURCE_TYPE):
- """Helper for creating and registering a mock callback."""
- callback = mock.Mock(__name__='callback',
- im_class=mock.Mock(__name__='class'))
- notifications.register_event_callback(operation, resource_type, callback)
- return callback
-
-
-class AuditNotificationsTestCase(unit.BaseTestCase):
- def setUp(self):
- super(AuditNotificationsTestCase, self).setUp()
- self.config_fixture = self.useFixture(config_fixture.Config(CONF))
- self.addCleanup(notifications.clear_subscribers)
-
- def _test_notification_operation(self, notify_function, operation):
- exp_resource_id = uuid.uuid4().hex
- callback = register_callback(operation)
- notify_function(EXP_RESOURCE_TYPE, exp_resource_id)
- callback.assert_called_once_with('identity', EXP_RESOURCE_TYPE,
- operation,
- {'resource_info': exp_resource_id})
- self.config_fixture.config(notification_format='cadf')
- with mock.patch(
- 'keystone.notifications._create_cadf_payload') as cadf_notify:
- notify_function(EXP_RESOURCE_TYPE, exp_resource_id)
- initiator = None
- cadf_notify.assert_called_once_with(
- operation, EXP_RESOURCE_TYPE, exp_resource_id,
- notifications.taxonomy.OUTCOME_SUCCESS, initiator)
- notify_function(EXP_RESOURCE_TYPE, exp_resource_id, public=False)
- cadf_notify.assert_called_once_with(
- operation, EXP_RESOURCE_TYPE, exp_resource_id,
- notifications.taxonomy.OUTCOME_SUCCESS, initiator)
-
- def test_resource_created_notification(self):
- self._test_notification_operation(notifications.Audit.created,
- CREATED_OPERATION)
-
- def test_resource_updated_notification(self):
- self._test_notification_operation(notifications.Audit.updated,
- UPDATED_OPERATION)
-
- def test_resource_deleted_notification(self):
- self._test_notification_operation(notifications.Audit.deleted,
- DELETED_OPERATION)
-
- def test_resource_disabled_notification(self):
- self._test_notification_operation(notifications.Audit.disabled,
- DISABLED_OPERATION)
-
-
-class NotificationsTestCase(unit.BaseTestCase):
-
- def test_send_notification(self):
- """Test _send_notification.
-
- Test the private method _send_notification to ensure event_type,
- payload, and context are built and passed properly.
-
- """
- resource = uuid.uuid4().hex
- resource_type = EXP_RESOURCE_TYPE
- operation = CREATED_OPERATION
-
- # NOTE(ldbragst): Even though notifications._send_notification doesn't
- # contain logic that creates cases, this is supposed to test that
- # context is always empty and that we ensure the resource ID of the
- # resource in the notification is contained in the payload. It was
- # agreed that context should be empty in Keystone's case, which is
- # also noted in the /keystone/notifications.py module. This test
- # ensures and maintains these conditions.
- expected_args = [
- {}, # empty context
- 'identity.%s.created' % resource_type, # event_type
- {'resource_info': resource}, # payload
- 'INFO', # priority is always INFO...
- ]
-
- with mock.patch.object(notifications._get_notifier(),
- '_notify') as mocked:
- notifications._send_notification(operation, resource_type,
- resource)
- mocked.assert_called_once_with(*expected_args)
-
- def test_send_notification_with_opt_out(self):
- """Test the private method _send_notification with opt-out.
-
- Test that _send_notification does not notify when a valid
- notification_opt_out configuration is provided.
- """
- resource = uuid.uuid4().hex
- resource_type = EXP_RESOURCE_TYPE
- operation = CREATED_OPERATION
- event_type = 'identity.%s.created' % resource_type
-
- # NOTE(diazjf): Here we add notification_opt_out to the
- # configuration so that we should return before _get_notifer is
- # called. This is because we are opting out notifications for the
- # passed resource_type and operation.
- conf = self.useFixture(config_fixture.Config(CONF))
- conf.config(notification_opt_out=event_type)
-
- with mock.patch.object(notifications._get_notifier(),
- '_notify') as mocked:
-
- notifications._send_notification(operation, resource_type,
- resource)
- mocked.assert_not_called()
-
- def test_send_audit_notification_with_opt_out(self):
- """Test the private method _send_audit_notification with opt-out.
-
- Test that _send_audit_notification does not notify when a valid
- notification_opt_out configuration is provided.
- """
- resource_type = EXP_RESOURCE_TYPE
-
- action = CREATED_OPERATION + '.' + resource_type
- initiator = mock
- target = mock
- outcome = 'success'
- event_type = 'identity.%s.created' % resource_type
-
- conf = self.useFixture(config_fixture.Config(CONF))
- conf.config(notification_opt_out=event_type)
-
- with mock.patch.object(notifications._get_notifier(),
- '_notify') as mocked:
-
- notifications._send_audit_notification(action,
- initiator,
- outcome,
- target,
- event_type)
- mocked.assert_not_called()
-
- def test_opt_out_authenticate_event(self):
- """Test that authenticate events are successfully opted out."""
- resource_type = EXP_RESOURCE_TYPE
-
- action = CREATED_OPERATION + '.' + resource_type
- initiator = mock
- target = mock
- outcome = 'success'
- event_type = 'identity.authenticate'
- meter_name = '%s.%s' % (event_type, outcome)
-
- conf = self.useFixture(config_fixture.Config(CONF))
- conf.config(notification_opt_out=meter_name)
-
- with mock.patch.object(notifications._get_notifier(),
- '_notify') as mocked:
-
- notifications._send_audit_notification(action,
- initiator,
- outcome,
- target,
- event_type)
- mocked.assert_not_called()
-
-
-class BaseNotificationTest(test_v3.RestfulTestCase):
-
- def setUp(self):
- super(BaseNotificationTest, self).setUp()
-
- self._notifications = []
- self._audits = []
-
- def fake_notify(operation, resource_type, resource_id,
- actor_dict=None, public=True):
- note = {
- 'resource_id': resource_id,
- 'operation': operation,
- 'resource_type': resource_type,
- 'send_notification_called': True,
- 'public': public}
- if actor_dict:
- note['actor_id'] = actor_dict.get('id')
- note['actor_type'] = actor_dict.get('type')
- note['actor_operation'] = actor_dict.get('actor_operation')
- self._notifications.append(note)
-
- self.useFixture(mockpatch.PatchObject(
- notifications, '_send_notification', fake_notify))
-
- def fake_audit(action, initiator, outcome, target,
- event_type, **kwargs):
- service_security = cadftaxonomy.SERVICE_SECURITY
-
- event = eventfactory.EventFactory().new_event(
- eventType=cadftype.EVENTTYPE_ACTIVITY,
- outcome=outcome,
- action=action,
- initiator=initiator,
- target=target,
- observer=cadfresource.Resource(typeURI=service_security))
-
- for key, value in kwargs.items():
- setattr(event, key, value)
-
- audit = {
- 'payload': event.as_dict(),
- 'event_type': event_type,
- 'send_notification_called': True}
- self._audits.append(audit)
-
- self.useFixture(mockpatch.PatchObject(
- notifications, '_send_audit_notification', fake_audit))
-
- def _assert_last_note(self, resource_id, operation, resource_type,
- actor_id=None, actor_type=None,
- actor_operation=None):
- # NOTE(stevemar): If 'basic' format is not used, then simply
- # return since this assertion is not valid.
- if CONF.notification_format != 'basic':
- return
- self.assertTrue(len(self._notifications) > 0)
- note = self._notifications[-1]
- self.assertEqual(operation, note['operation'])
- self.assertEqual(resource_id, note['resource_id'])
- self.assertEqual(resource_type, note['resource_type'])
- self.assertTrue(note['send_notification_called'])
- if actor_id:
- self.assertEqual(actor_id, note['actor_id'])
- self.assertEqual(actor_type, note['actor_type'])
- self.assertEqual(actor_operation, note['actor_operation'])
-
- def _assert_last_audit(self, resource_id, operation, resource_type,
- target_uri):
- # NOTE(stevemar): If 'cadf' format is not used, then simply
- # return since this assertion is not valid.
- if CONF.notification_format != 'cadf':
- return
- self.assertTrue(len(self._audits) > 0)
- audit = self._audits[-1]
- payload = audit['payload']
- self.assertEqual(resource_id, payload['resource_info'])
- action = '%s.%s' % (operation, resource_type)
- self.assertEqual(action, payload['action'])
- self.assertEqual(target_uri, payload['target']['typeURI'])
- self.assertEqual(resource_id, payload['target']['id'])
- event_type = '%s.%s.%s' % ('identity', resource_type, operation)
- self.assertEqual(event_type, audit['event_type'])
- self.assertTrue(audit['send_notification_called'])
-
- def _assert_initiator_data_is_set(self, operation, resource_type, typeURI):
- self.assertTrue(len(self._audits) > 0)
- audit = self._audits[-1]
- payload = audit['payload']
- self.assertEqual(self.user_id, payload['initiator']['id'])
- self.assertEqual(self.project_id, payload['initiator']['project_id'])
- self.assertEqual(typeURI, payload['target']['typeURI'])
- action = '%s.%s' % (operation, resource_type)
- self.assertEqual(action, payload['action'])
-
- def _assert_notify_not_sent(self, resource_id, operation, resource_type,
- public=True):
- unexpected = {
- 'resource_id': resource_id,
- 'operation': operation,
- 'resource_type': resource_type,
- 'send_notification_called': True,
- 'public': public}
- for note in self._notifications:
- self.assertNotEqual(unexpected, note)
-
- def _assert_notify_sent(self, resource_id, operation, resource_type,
- public=True):
- expected = {
- 'resource_id': resource_id,
- 'operation': operation,
- 'resource_type': resource_type,
- 'send_notification_called': True,
- 'public': public}
- for note in self._notifications:
- if expected == note:
- break
- else:
- self.fail("Notification not sent.")
-
-
-class NotificationsForEntities(BaseNotificationTest):
-
- def test_create_group(self):
- group_ref = unit.new_group_ref(domain_id=self.domain_id)
- group_ref = self.identity_api.create_group(group_ref)
- self._assert_last_note(group_ref['id'], CREATED_OPERATION, 'group')
- self._assert_last_audit(group_ref['id'], CREATED_OPERATION, 'group',
- cadftaxonomy.SECURITY_GROUP)
-
- def test_create_project(self):
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project_ref['id'], project_ref)
- self._assert_last_note(
- project_ref['id'], CREATED_OPERATION, 'project')
- self._assert_last_audit(project_ref['id'], CREATED_OPERATION,
- 'project', cadftaxonomy.SECURITY_PROJECT)
-
- def test_create_role(self):
- role_ref = unit.new_role_ref()
- self.role_api.create_role(role_ref['id'], role_ref)
- self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role')
- self._assert_last_audit(role_ref['id'], CREATED_OPERATION, 'role',
- cadftaxonomy.SECURITY_ROLE)
-
- def test_create_user(self):
- user_ref = unit.new_user_ref(domain_id=self.domain_id)
- user_ref = self.identity_api.create_user(user_ref)
- self._assert_last_note(user_ref['id'], CREATED_OPERATION, 'user')
- self._assert_last_audit(user_ref['id'], CREATED_OPERATION, 'user',
- cadftaxonomy.SECURITY_ACCOUNT_USER)
-
- def test_create_trust(self):
- trustor = unit.new_user_ref(domain_id=self.domain_id)
- trustor = self.identity_api.create_user(trustor)
- trustee = unit.new_user_ref(domain_id=self.domain_id)
- trustee = self.identity_api.create_user(trustee)
- role_ref = unit.new_role_ref()
- self.role_api.create_role(role_ref['id'], role_ref)
- trust_ref = unit.new_trust_ref(trustor['id'],
- trustee['id'])
- self.trust_api.create_trust(trust_ref['id'],
- trust_ref,
- [role_ref])
- self._assert_last_note(
- trust_ref['id'], CREATED_OPERATION, 'OS-TRUST:trust')
- self._assert_last_audit(trust_ref['id'], CREATED_OPERATION,
- 'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
-
- def test_delete_group(self):
- group_ref = unit.new_group_ref(domain_id=self.domain_id)
- group_ref = self.identity_api.create_group(group_ref)
- self.identity_api.delete_group(group_ref['id'])
- self._assert_last_note(group_ref['id'], DELETED_OPERATION, 'group')
- self._assert_last_audit(group_ref['id'], DELETED_OPERATION, 'group',
- cadftaxonomy.SECURITY_GROUP)
-
- def test_delete_project(self):
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.resource_api.delete_project(project_ref['id'])
- self._assert_last_note(
- project_ref['id'], DELETED_OPERATION, 'project')
- self._assert_last_audit(project_ref['id'], DELETED_OPERATION,
- 'project', cadftaxonomy.SECURITY_PROJECT)
-
- def test_delete_role(self):
- role_ref = unit.new_role_ref()
- self.role_api.create_role(role_ref['id'], role_ref)
- self.role_api.delete_role(role_ref['id'])
- self._assert_last_note(role_ref['id'], DELETED_OPERATION, 'role')
- self._assert_last_audit(role_ref['id'], DELETED_OPERATION, 'role',
- cadftaxonomy.SECURITY_ROLE)
-
- def test_delete_user(self):
- user_ref = unit.new_user_ref(domain_id=self.domain_id)
- user_ref = self.identity_api.create_user(user_ref)
- self.identity_api.delete_user(user_ref['id'])
- self._assert_last_note(user_ref['id'], DELETED_OPERATION, 'user')
- self._assert_last_audit(user_ref['id'], DELETED_OPERATION, 'user',
- cadftaxonomy.SECURITY_ACCOUNT_USER)
-
- def test_create_domain(self):
- domain_ref = unit.new_domain_ref()
- self.resource_api.create_domain(domain_ref['id'], domain_ref)
- self._assert_last_note(domain_ref['id'], CREATED_OPERATION, 'domain')
- self._assert_last_audit(domain_ref['id'], CREATED_OPERATION, 'domain',
- cadftaxonomy.SECURITY_DOMAIN)
-
- def test_update_domain(self):
- domain_ref = unit.new_domain_ref()
- self.resource_api.create_domain(domain_ref['id'], domain_ref)
- domain_ref['description'] = uuid.uuid4().hex
- self.resource_api.update_domain(domain_ref['id'], domain_ref)
- self._assert_last_note(domain_ref['id'], UPDATED_OPERATION, 'domain')
- self._assert_last_audit(domain_ref['id'], UPDATED_OPERATION, 'domain',
- cadftaxonomy.SECURITY_DOMAIN)
-
- def test_delete_domain(self):
- domain_ref = unit.new_domain_ref()
- self.resource_api.create_domain(domain_ref['id'], domain_ref)
- domain_ref['enabled'] = False
- self.resource_api.update_domain(domain_ref['id'], domain_ref)
- self.resource_api.delete_domain(domain_ref['id'])
- self._assert_last_note(domain_ref['id'], DELETED_OPERATION, 'domain')
- self._assert_last_audit(domain_ref['id'], DELETED_OPERATION, 'domain',
- cadftaxonomy.SECURITY_DOMAIN)
-
- def test_delete_trust(self):
- trustor = unit.new_user_ref(domain_id=self.domain_id)
- trustor = self.identity_api.create_user(trustor)
- trustee = unit.new_user_ref(domain_id=self.domain_id)
- trustee = self.identity_api.create_user(trustee)
- role_ref = unit.new_role_ref()
- trust_ref = unit.new_trust_ref(trustor['id'], trustee['id'])
- self.trust_api.create_trust(trust_ref['id'],
- trust_ref,
- [role_ref])
- self.trust_api.delete_trust(trust_ref['id'])
- self._assert_last_note(
- trust_ref['id'], DELETED_OPERATION, 'OS-TRUST:trust')
- self._assert_last_audit(trust_ref['id'], DELETED_OPERATION,
- 'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
-
- def test_create_endpoint(self):
- endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id)
- self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
- self._assert_notify_sent(endpoint_ref['id'], CREATED_OPERATION,
- 'endpoint')
- self._assert_last_audit(endpoint_ref['id'], CREATED_OPERATION,
- 'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
-
- def test_update_endpoint(self):
- endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id)
- self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
- self.catalog_api.update_endpoint(endpoint_ref['id'], endpoint_ref)
- self._assert_notify_sent(endpoint_ref['id'], UPDATED_OPERATION,
- 'endpoint')
- self._assert_last_audit(endpoint_ref['id'], UPDATED_OPERATION,
- 'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
-
- def test_delete_endpoint(self):
- endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id)
- self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
- self.catalog_api.delete_endpoint(endpoint_ref['id'])
- self._assert_notify_sent(endpoint_ref['id'], DELETED_OPERATION,
- 'endpoint')
- self._assert_last_audit(endpoint_ref['id'], DELETED_OPERATION,
- 'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
-
- def test_create_service(self):
- service_ref = unit.new_service_ref()
- self.catalog_api.create_service(service_ref['id'], service_ref)
- self._assert_notify_sent(service_ref['id'], CREATED_OPERATION,
- 'service')
- self._assert_last_audit(service_ref['id'], CREATED_OPERATION,
- 'service', cadftaxonomy.SECURITY_SERVICE)
-
- def test_update_service(self):
- service_ref = unit.new_service_ref()
- self.catalog_api.create_service(service_ref['id'], service_ref)
- self.catalog_api.update_service(service_ref['id'], service_ref)
- self._assert_notify_sent(service_ref['id'], UPDATED_OPERATION,
- 'service')
- self._assert_last_audit(service_ref['id'], UPDATED_OPERATION,
- 'service', cadftaxonomy.SECURITY_SERVICE)
-
- def test_delete_service(self):
- service_ref = unit.new_service_ref()
- self.catalog_api.create_service(service_ref['id'], service_ref)
- self.catalog_api.delete_service(service_ref['id'])
- self._assert_notify_sent(service_ref['id'], DELETED_OPERATION,
- 'service')
- self._assert_last_audit(service_ref['id'], DELETED_OPERATION,
- 'service', cadftaxonomy.SECURITY_SERVICE)
-
- def test_create_region(self):
- region_ref = unit.new_region_ref()
- self.catalog_api.create_region(region_ref)
- self._assert_notify_sent(region_ref['id'], CREATED_OPERATION,
- 'region')
- self._assert_last_audit(region_ref['id'], CREATED_OPERATION,
- 'region', cadftaxonomy.SECURITY_REGION)
-
- def test_update_region(self):
- region_ref = unit.new_region_ref()
- self.catalog_api.create_region(region_ref)
- self.catalog_api.update_region(region_ref['id'], region_ref)
- self._assert_notify_sent(region_ref['id'], UPDATED_OPERATION,
- 'region')
- self._assert_last_audit(region_ref['id'], UPDATED_OPERATION,
- 'region', cadftaxonomy.SECURITY_REGION)
-
- def test_delete_region(self):
- region_ref = unit.new_region_ref()
- self.catalog_api.create_region(region_ref)
- self.catalog_api.delete_region(region_ref['id'])
- self._assert_notify_sent(region_ref['id'], DELETED_OPERATION,
- 'region')
- self._assert_last_audit(region_ref['id'], DELETED_OPERATION,
- 'region', cadftaxonomy.SECURITY_REGION)
-
- def test_create_policy(self):
- policy_ref = unit.new_policy_ref()
- self.policy_api.create_policy(policy_ref['id'], policy_ref)
- self._assert_notify_sent(policy_ref['id'], CREATED_OPERATION,
- 'policy')
- self._assert_last_audit(policy_ref['id'], CREATED_OPERATION,
- 'policy', cadftaxonomy.SECURITY_POLICY)
-
- def test_update_policy(self):
- policy_ref = unit.new_policy_ref()
- self.policy_api.create_policy(policy_ref['id'], policy_ref)
- self.policy_api.update_policy(policy_ref['id'], policy_ref)
- self._assert_notify_sent(policy_ref['id'], UPDATED_OPERATION,
- 'policy')
- self._assert_last_audit(policy_ref['id'], UPDATED_OPERATION,
- 'policy', cadftaxonomy.SECURITY_POLICY)
-
- def test_delete_policy(self):
- policy_ref = unit.new_policy_ref()
- self.policy_api.create_policy(policy_ref['id'], policy_ref)
- self.policy_api.delete_policy(policy_ref['id'])
- self._assert_notify_sent(policy_ref['id'], DELETED_OPERATION,
- 'policy')
- self._assert_last_audit(policy_ref['id'], DELETED_OPERATION,
- 'policy', cadftaxonomy.SECURITY_POLICY)
-
- def test_disable_domain(self):
- domain_ref = unit.new_domain_ref()
- self.resource_api.create_domain(domain_ref['id'], domain_ref)
- domain_ref['enabled'] = False
- self.resource_api.update_domain(domain_ref['id'], domain_ref)
- self._assert_notify_sent(domain_ref['id'], 'disabled', 'domain',
- public=False)
-
- def test_disable_of_disabled_domain_does_not_notify(self):
- domain_ref = unit.new_domain_ref(enabled=False)
- self.resource_api.create_domain(domain_ref['id'], domain_ref)
- # The domain_ref above is not changed during the create process. We
- # can use the same ref to perform the update.
- self.resource_api.update_domain(domain_ref['id'], domain_ref)
- self._assert_notify_not_sent(domain_ref['id'], 'disabled', 'domain',
- public=False)
-
- def test_update_group(self):
- group_ref = unit.new_group_ref(domain_id=self.domain_id)
- group_ref = self.identity_api.create_group(group_ref)
- self.identity_api.update_group(group_ref['id'], group_ref)
- self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group')
- self._assert_last_audit(group_ref['id'], UPDATED_OPERATION, 'group',
- cadftaxonomy.SECURITY_GROUP)
-
- def test_update_project(self):
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.resource_api.update_project(project_ref['id'], project_ref)
- self._assert_notify_sent(
- project_ref['id'], UPDATED_OPERATION, 'project', public=True)
- self._assert_last_audit(project_ref['id'], UPDATED_OPERATION,
- 'project', cadftaxonomy.SECURITY_PROJECT)
-
- def test_disable_project(self):
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project_ref['id'], project_ref)
- project_ref['enabled'] = False
- self.resource_api.update_project(project_ref['id'], project_ref)
- self._assert_notify_sent(project_ref['id'], 'disabled', 'project',
- public=False)
-
- def test_disable_of_disabled_project_does_not_notify(self):
- project_ref = unit.new_project_ref(domain_id=self.domain_id,
- enabled=False)
- self.resource_api.create_project(project_ref['id'], project_ref)
- # The project_ref above is not changed during the create process. We
- # can use the same ref to perform the update.
- self.resource_api.update_project(project_ref['id'], project_ref)
- self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project',
- public=False)
-
- def test_update_project_does_not_send_disable(self):
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project_ref['id'], project_ref)
- project_ref['enabled'] = True
- self.resource_api.update_project(project_ref['id'], project_ref)
- self._assert_last_note(
- project_ref['id'], UPDATED_OPERATION, 'project')
- self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project')
-
- def test_update_role(self):
- role_ref = unit.new_role_ref()
- self.role_api.create_role(role_ref['id'], role_ref)
- self.role_api.update_role(role_ref['id'], role_ref)
- self._assert_last_note(role_ref['id'], UPDATED_OPERATION, 'role')
- self._assert_last_audit(role_ref['id'], UPDATED_OPERATION, 'role',
- cadftaxonomy.SECURITY_ROLE)
-
- def test_update_user(self):
- user_ref = unit.new_user_ref(domain_id=self.domain_id)
- user_ref = self.identity_api.create_user(user_ref)
- self.identity_api.update_user(user_ref['id'], user_ref)
- self._assert_last_note(user_ref['id'], UPDATED_OPERATION, 'user')
- self._assert_last_audit(user_ref['id'], UPDATED_OPERATION, 'user',
- cadftaxonomy.SECURITY_ACCOUNT_USER)
-
- def test_config_option_no_events(self):
- self.config_fixture.config(notification_format='basic')
- role_ref = unit.new_role_ref()
- self.role_api.create_role(role_ref['id'], role_ref)
- # The regular notifications will still be emitted, since they are
- # used for callback handling.
- self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role')
- # No audit event should have occurred
- self.assertEqual(0, len(self._audits))
-
- def test_add_user_to_group(self):
- user_ref = unit.new_user_ref(domain_id=self.domain_id)
- user_ref = self.identity_api.create_user(user_ref)
- group_ref = unit.new_group_ref(domain_id=self.domain_id)
- group_ref = self.identity_api.create_group(group_ref)
- self.identity_api.add_user_to_group(user_ref['id'], group_ref['id'])
- self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group',
- actor_id=user_ref['id'], actor_type='user',
- actor_operation='added')
-
- def test_remove_user_from_group(self):
- user_ref = unit.new_user_ref(domain_id=self.domain_id)
- user_ref = self.identity_api.create_user(user_ref)
- group_ref = unit.new_group_ref(domain_id=self.domain_id)
- group_ref = self.identity_api.create_group(group_ref)
- self.identity_api.add_user_to_group(user_ref['id'], group_ref['id'])
- self.identity_api.remove_user_from_group(user_ref['id'],
- group_ref['id'])
- self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group',
- actor_id=user_ref['id'], actor_type='user',
- actor_operation='removed')
-
-
-class CADFNotificationsForEntities(NotificationsForEntities):
-
- def setUp(self):
- super(CADFNotificationsForEntities, self).setUp()
- self.config_fixture.config(notification_format='cadf')
-
- def test_initiator_data_is_set(self):
- ref = unit.new_domain_ref()
- resp = self.post('/domains', body={'domain': ref})
- resource_id = resp.result.get('domain').get('id')
- self._assert_last_audit(resource_id, CREATED_OPERATION, 'domain',
- cadftaxonomy.SECURITY_DOMAIN)
- self._assert_initiator_data_is_set(CREATED_OPERATION,
- 'domain',
- cadftaxonomy.SECURITY_DOMAIN)
-
-
-class V2Notifications(BaseNotificationTest):
-
- def setUp(self):
- super(V2Notifications, self).setUp()
- self.config_fixture.config(notification_format='cadf')
-
- def test_user(self):
- token = self.get_scoped_token()
- resp = self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex,
- 'enabled': True,
- },
- },
- token=token,
- )
- user_id = resp.result.get('user').get('id')
- self._assert_initiator_data_is_set(CREATED_OPERATION,
- 'user',
- cadftaxonomy.SECURITY_ACCOUNT_USER)
- # test for delete user
- self.admin_request(
- method='DELETE',
- path='/v2.0/users/%s' % user_id,
- token=token,
- )
- self._assert_initiator_data_is_set(DELETED_OPERATION,
- 'user',
- cadftaxonomy.SECURITY_ACCOUNT_USER)
-
- def test_role(self):
- token = self.get_scoped_token()
- resp = self.admin_request(
- method='POST',
- path='/v2.0/OS-KSADM/roles',
- body={
- 'role': {
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- },
- },
- token=token,
- )
- role_id = resp.result.get('role').get('id')
- self._assert_initiator_data_is_set(CREATED_OPERATION,
- 'role',
- cadftaxonomy.SECURITY_ROLE)
- # test for delete role
- self.admin_request(
- method='DELETE',
- path='/v2.0/OS-KSADM/roles/%s' % role_id,
- token=token,
- )
- self._assert_initiator_data_is_set(DELETED_OPERATION,
- 'role',
- cadftaxonomy.SECURITY_ROLE)
-
- def test_service_and_endpoint(self):
- token = self.get_scoped_token()
- resp = self.admin_request(
- method='POST',
- path='/v2.0/OS-KSADM/services',
- body={
- 'OS-KSADM:service': {
- 'name': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- },
- },
- token=token,
- )
- service_id = resp.result.get('OS-KSADM:service').get('id')
- self._assert_initiator_data_is_set(CREATED_OPERATION,
- 'service',
- cadftaxonomy.SECURITY_SERVICE)
- resp = self.admin_request(
- method='POST',
- path='/v2.0/endpoints',
- body={
- 'endpoint': {
- 'region': uuid.uuid4().hex,
- 'service_id': service_id,
- 'publicurl': uuid.uuid4().hex,
- 'adminurl': uuid.uuid4().hex,
- 'internalurl': uuid.uuid4().hex,
- },
- },
- token=token,
- )
- endpoint_id = resp.result.get('endpoint').get('id')
- self._assert_initiator_data_is_set(CREATED_OPERATION,
- 'endpoint',
- cadftaxonomy.SECURITY_ENDPOINT)
- # test for delete endpoint
- self.admin_request(
- method='DELETE',
- path='/v2.0/endpoints/%s' % endpoint_id,
- token=token,
- )
- self._assert_initiator_data_is_set(DELETED_OPERATION,
- 'endpoint',
- cadftaxonomy.SECURITY_ENDPOINT)
- # test for delete service
- self.admin_request(
- method='DELETE',
- path='/v2.0/OS-KSADM/services/%s' % service_id,
- token=token,
- )
- self._assert_initiator_data_is_set(DELETED_OPERATION,
- 'service',
- cadftaxonomy.SECURITY_SERVICE)
-
- def test_project(self):
- token = self.get_scoped_token()
- resp = self.admin_request(
- method='POST',
- path='/v2.0/tenants',
- body={
- 'tenant': {
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True
- },
- },
- token=token,
- )
- project_id = resp.result.get('tenant').get('id')
- self._assert_initiator_data_is_set(CREATED_OPERATION,
- 'project',
- cadftaxonomy.SECURITY_PROJECT)
- # test for delete project
- self.admin_request(
- method='DELETE',
- path='/v2.0/tenants/%s' % project_id,
- token=token,
- )
- self._assert_initiator_data_is_set(DELETED_OPERATION,
- 'project',
- cadftaxonomy.SECURITY_PROJECT)
-
-
-class TestEventCallbacks(test_v3.RestfulTestCase):
-
- def setUp(self):
- super(TestEventCallbacks, self).setUp()
- self.has_been_called = False
-
- def _project_deleted_callback(self, service, resource_type, operation,
- payload):
- self.has_been_called = True
-
- def _project_created_callback(self, service, resource_type, operation,
- payload):
- self.has_been_called = True
-
- def test_notification_received(self):
- callback = register_callback(CREATED_OPERATION, 'project')
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.assertTrue(callback.called)
-
- def test_notification_method_not_callable(self):
- fake_method = None
- self.assertRaises(TypeError,
- notifications.register_event_callback,
- UPDATED_OPERATION,
- 'project',
- [fake_method])
-
- def test_notification_event_not_valid(self):
- self.assertRaises(ValueError,
- notifications.register_event_callback,
- uuid.uuid4().hex,
- 'project',
- self._project_deleted_callback)
-
- def test_event_registration_for_unknown_resource_type(self):
- # Registration for unknown resource types should succeed. If no event
- # is issued for that resource type, the callback wont be triggered.
- notifications.register_event_callback(DELETED_OPERATION,
- uuid.uuid4().hex,
- self._project_deleted_callback)
- resource_type = uuid.uuid4().hex
- notifications.register_event_callback(DELETED_OPERATION,
- resource_type,
- self._project_deleted_callback)
-
- def test_provider_event_callback_subscription(self):
- callback_called = []
-
- @notifications.listener
- class Foo(object):
- def __init__(self):
- self.event_callbacks = {
- CREATED_OPERATION: {'project': self.foo_callback}}
-
- def foo_callback(self, service, resource_type, operation,
- payload):
- # uses callback_called from the closure
- callback_called.append(True)
-
- Foo()
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.assertEqual([True], callback_called)
-
- def test_provider_event_callbacks_subscription(self):
- callback_called = []
-
- @notifications.listener
- class Foo(object):
- def __init__(self):
- self.event_callbacks = {
- CREATED_OPERATION: {
- 'project': [self.callback_0, self.callback_1]}}
-
- def callback_0(self, service, resource_type, operation, payload):
- # uses callback_called from the closure
- callback_called.append('cb0')
-
- def callback_1(self, service, resource_type, operation, payload):
- # uses callback_called from the closure
- callback_called.append('cb1')
-
- Foo()
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.assertItemsEqual(['cb1', 'cb0'], callback_called)
-
- def test_invalid_event_callbacks(self):
- @notifications.listener
- class Foo(object):
- def __init__(self):
- self.event_callbacks = 'bogus'
-
- self.assertRaises(AttributeError, Foo)
-
- def test_invalid_event_callbacks_event(self):
- @notifications.listener
- class Foo(object):
- def __init__(self):
- self.event_callbacks = {CREATED_OPERATION: 'bogus'}
-
- self.assertRaises(AttributeError, Foo)
-
- def test_using_an_unbound_method_as_a_callback_fails(self):
- # NOTE(dstanek): An unbound method is when you reference a method
- # from a class object. You'll get a method that isn't bound to a
- # particular instance so there is no magic 'self'. You can call it,
- # but you have to pass in the instance manually like: C.m(C()).
- # If you reference the method from an instance then you get a method
- # that effectively curries the self argument for you
- # (think functools.partial). Obviously is we don't have an
- # instance then we can't call the method.
- @notifications.listener
- class Foo(object):
- def __init__(self):
- self.event_callbacks = {CREATED_OPERATION:
- {'project': Foo.callback}}
-
- def callback(self, *args):
- pass
-
- # TODO(dstanek): it would probably be nice to fail early using
- # something like:
- # self.assertRaises(TypeError, Foo)
- Foo()
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- self.assertRaises(TypeError, self.resource_api.create_project,
- project_ref['id'], project_ref)
-
-
-class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
-
- LOCAL_HOST = 'localhost'
- ACTION = 'authenticate'
- ROLE_ASSIGNMENT = 'role_assignment'
-
- def setUp(self):
- super(CadfNotificationsWrapperTestCase, self).setUp()
- self._notifications = []
-
- def fake_notify(action, initiator, outcome, target,
- event_type, **kwargs):
- service_security = cadftaxonomy.SERVICE_SECURITY
-
- event = eventfactory.EventFactory().new_event(
- eventType=cadftype.EVENTTYPE_ACTIVITY,
- outcome=outcome,
- action=action,
- initiator=initiator,
- target=target,
- observer=cadfresource.Resource(typeURI=service_security))
-
- for key, value in kwargs.items():
- setattr(event, key, value)
-
- note = {
- 'action': action,
- 'initiator': initiator,
- 'event': event,
- 'event_type': event_type,
- 'send_notification_called': True}
- self._notifications.append(note)
-
- self.useFixture(mockpatch.PatchObject(
- notifications, '_send_audit_notification', fake_notify))
-
- def _assert_last_note(self, action, user_id, event_type=None):
- self.assertTrue(self._notifications)
- note = self._notifications[-1]
- self.assertEqual(action, note['action'])
- initiator = note['initiator']
- self.assertEqual(user_id, initiator.id)
- self.assertEqual(self.LOCAL_HOST, initiator.host.address)
- self.assertTrue(note['send_notification_called'])
- if event_type:
- self.assertEqual(event_type, note['event_type'])
-
- def _assert_event(self, role_id, project=None, domain=None,
- user=None, group=None, inherit=False):
- """Assert that the CADF event is valid.
-
- In the case of role assignments, the event will have extra data,
- specifically, the role, target, actor, and if the role is inherited.
-
- An example event, as a dictionary is seen below:
- {
- 'typeURI': 'http://schemas.dmtf.org/cloud/audit/1.0/event',
- 'initiator': {
- 'typeURI': 'service/security/account/user',
- 'host': {'address': 'localhost'},
- 'id': 'openstack:0a90d95d-582c-4efb-9cbc-e2ca7ca9c341',
- 'name': u'bccc2d9bfc2a46fd9e33bcf82f0b5c21'
- },
- 'target': {
- 'typeURI': 'service/security/account/user',
- 'id': 'openstack:d48ea485-ef70-4f65-8d2b-01aa9d7ec12d'
- },
- 'observer': {
- 'typeURI': 'service/security',
- 'id': 'openstack:d51dd870-d929-4aba-8d75-dcd7555a0c95'
- },
- 'eventType': 'activity',
- 'eventTime': '2014-08-21T21:04:56.204536+0000',
- 'role': u'0e6b990380154a2599ce6b6e91548a68',
- 'domain': u'24bdcff1aab8474895dbaac509793de1',
- 'inherited_to_projects': False,
- 'group': u'c1e22dc67cbd469ea0e33bf428fe597a',
- 'action': 'created.role_assignment',
- 'outcome': 'success',
- 'id': 'openstack:782689dd-f428-4f13-99c7-5c70f94a5ac1'
- }
- """
- note = self._notifications[-1]
- event = note['event']
- if project:
- self.assertEqual(project, event.project)
- if domain:
- self.assertEqual(domain, event.domain)
- if group:
- self.assertEqual(group, event.group)
- elif user:
- self.assertEqual(user, event.user)
- self.assertEqual(role_id, event.role)
- self.assertEqual(inherit, event.inherited_to_projects)
-
- def test_v3_authenticate_user_name_and_domain_id(self):
- user_id = self.user_id
- user_name = self.user['name']
- password = self.user['password']
- domain_id = self.domain_id
- data = self.build_authentication_request(username=user_name,
- user_domain_id=domain_id,
- password=password)
- self.post('/auth/tokens', body=data)
- self._assert_last_note(self.ACTION, user_id)
-
- def test_v3_authenticate_user_id(self):
- user_id = self.user_id
- password = self.user['password']
- data = self.build_authentication_request(user_id=user_id,
- password=password)
- self.post('/auth/tokens', body=data)
- self._assert_last_note(self.ACTION, user_id)
-
- def test_v3_authenticate_user_name_and_domain_name(self):
- user_id = self.user_id
- user_name = self.user['name']
- password = self.user['password']
- domain_name = self.domain['name']
- data = self.build_authentication_request(username=user_name,
- user_domain_name=domain_name,
- password=password)
- self.post('/auth/tokens', body=data)
- self._assert_last_note(self.ACTION, user_id)
-
- def _test_role_assignment(self, url, role, project=None, domain=None,
- user=None, group=None):
- self.put(url)
- action = "%s.%s" % (CREATED_OPERATION, self.ROLE_ASSIGNMENT)
- event_type = '%s.%s.%s' % (notifications.SERVICE,
- self.ROLE_ASSIGNMENT, CREATED_OPERATION)
- self._assert_last_note(action, self.user_id, event_type)
- self._assert_event(role, project, domain, user, group)
- self.delete(url)
- action = "%s.%s" % (DELETED_OPERATION, self.ROLE_ASSIGNMENT)
- event_type = '%s.%s.%s' % (notifications.SERVICE,
- self.ROLE_ASSIGNMENT, DELETED_OPERATION)
- self._assert_last_note(action, self.user_id, event_type)
- self._assert_event(role, project, domain, user, None)
-
- def test_user_project_grant(self):
- url = ('/projects/%s/users/%s/roles/%s' %
- (self.project_id, self.user_id, self.role_id))
- self._test_role_assignment(url, self.role_id,
- project=self.project_id,
- user=self.user_id)
-
- def test_group_domain_grant(self):
- group_ref = unit.new_group_ref(domain_id=self.domain_id)
- group = self.identity_api.create_group(group_ref)
- self.identity_api.add_user_to_group(self.user_id, group['id'])
- url = ('/domains/%s/groups/%s/roles/%s' %
- (self.domain_id, group['id'], self.role_id))
- self._test_role_assignment(url, self.role_id,
- domain=self.domain_id,
- user=self.user_id,
- group=group['id'])
-
- def test_add_role_to_user_and_project(self):
- # A notification is sent when add_role_to_user_and_project is called on
- # the assignment manager.
-
- project_ref = unit.new_project_ref(self.domain_id)
- project = self.resource_api.create_project(
- project_ref['id'], project_ref)
- tenant_id = project['id']
-
- self.assignment_api.add_role_to_user_and_project(
- self.user_id, tenant_id, self.role_id)
-
- self.assertTrue(self._notifications)
- note = self._notifications[-1]
- self.assertEqual('created.role_assignment', note['action'])
- self.assertTrue(note['send_notification_called'])
-
- self._assert_event(self.role_id, project=tenant_id, user=self.user_id)
-
- def test_remove_role_from_user_and_project(self):
- # A notification is sent when remove_role_from_user_and_project is
- # called on the assignment manager.
-
- self.assignment_api.remove_role_from_user_and_project(
- self.user_id, self.project_id, self.role_id)
-
- self.assertTrue(self._notifications)
- note = self._notifications[-1]
- self.assertEqual('deleted.role_assignment', note['action'])
- self.assertTrue(note['send_notification_called'])
-
- self._assert_event(self.role_id, project=self.project_id,
- user=self.user_id)
-
-
-class TestCallbackRegistration(unit.BaseTestCase):
- def setUp(self):
- super(TestCallbackRegistration, self).setUp()
- self.mock_log = mock.Mock()
- # Force the callback logging to occur
- self.mock_log.logger.getEffectiveLevel.return_value = logging.DEBUG
-
- def verify_log_message(self, data):
- """Verify log message.
-
- Tests that use this are a little brittle because adding more
- logging can break them.
-
- TODO(dstanek): remove the need for this in a future refactoring
-
- """
- log_fn = self.mock_log.debug
- self.assertEqual(len(data), log_fn.call_count)
- for datum in data:
- log_fn.assert_any_call(mock.ANY, datum)
-
- def test_a_function_callback(self):
- def callback(*args, **kwargs):
- pass
-
- resource_type = 'thing'
- with mock.patch('keystone.notifications.LOG', self.mock_log):
- notifications.register_event_callback(
- CREATED_OPERATION, resource_type, callback)
-
- callback = 'keystone.tests.unit.common.test_notifications.callback'
- expected_log_data = {
- 'callback': callback,
- 'event': 'identity.%s.created' % resource_type
- }
- self.verify_log_message([expected_log_data])
-
- def test_a_method_callback(self):
- class C(object):
- def callback(self, *args, **kwargs):
- pass
-
- with mock.patch('keystone.notifications.LOG', self.mock_log):
- notifications.register_event_callback(
- CREATED_OPERATION, 'thing', C().callback)
-
- callback = 'keystone.tests.unit.common.test_notifications.C.callback'
- expected_log_data = {
- 'callback': callback,
- 'event': 'identity.thing.created'
- }
- self.verify_log_message([expected_log_data])
-
- def test_a_list_of_callbacks(self):
- def callback(*args, **kwargs):
- pass
-
- class C(object):
- def callback(self, *args, **kwargs):
- pass
-
- with mock.patch('keystone.notifications.LOG', self.mock_log):
- notifications.register_event_callback(
- CREATED_OPERATION, 'thing', [callback, C().callback])
-
- callback_1 = 'keystone.tests.unit.common.test_notifications.callback'
- callback_2 = 'keystone.tests.unit.common.test_notifications.C.callback'
- expected_log_data = [
- {
- 'callback': callback_1,
- 'event': 'identity.thing.created'
- },
- {
- 'callback': callback_2,
- 'event': 'identity.thing.created'
- },
- ]
- self.verify_log_message(expected_log_data)
-
- def test_an_invalid_callback(self):
- self.assertRaises(TypeError,
- notifications.register_event_callback,
- (CREATED_OPERATION, 'thing', object()))
-
- def test_an_invalid_event(self):
- def callback(*args, **kwargs):
- pass
-
- self.assertRaises(ValueError,
- notifications.register_event_callback,
- uuid.uuid4().hex,
- 'thing',
- callback)
diff --git a/keystone-moon/keystone/tests/unit/common/test_pemutils.py b/keystone-moon/keystone/tests/unit/common/test_pemutils.py
deleted file mode 100644
index c2f58518..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_pemutils.py
+++ /dev/null
@@ -1,337 +0,0 @@
-# Copyright 2013 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import base64
-
-from six import moves
-
-from keystone.common import pemutils
-from keystone.tests import unit as tests
-
-
-# List of 2-tuples, (pem_type, pem_header)
-headers = pemutils.PEM_TYPE_TO_HEADER.items()
-
-
-def make_data(size, offset=0):
- return ''.join([chr(x % 255) for x in moves.range(offset, size + offset)])
-
-
-def make_base64_from_data(data):
- return base64.b64encode(data)
-
-
-def wrap_base64(base64_text):
- wrapped_text = '\n'.join([base64_text[x:x + 64]
- for x in moves.range(0, len(base64_text), 64)])
- wrapped_text += '\n'
- return wrapped_text
-
-
-def make_pem(header, data):
- base64_text = make_base64_from_data(data)
- wrapped_text = wrap_base64(base64_text)
-
- result = '-----BEGIN %s-----\n' % header
- result += wrapped_text
- result += '-----END %s-----\n' % header
-
- return result
-
-
-class PEM(object):
- """PEM text and it's associated data broken out, used for testing.
-
- """
- def __init__(self, pem_header='CERTIFICATE', pem_type='cert',
- data_size=70, data_offset=0):
- self.pem_header = pem_header
- self.pem_type = pem_type
- self.data_size = data_size
- self.data_offset = data_offset
- self.data = make_data(self.data_size, self.data_offset)
- self.base64_text = make_base64_from_data(self.data)
- self.wrapped_base64 = wrap_base64(self.base64_text)
- self.pem_text = make_pem(self.pem_header, self.data)
-
-
-class TestPEMParseResult(tests.BaseTestCase):
-
- def test_pem_types(self):
- for pem_type in pemutils.pem_types:
- pem_header = pemutils.PEM_TYPE_TO_HEADER[pem_type]
- r = pemutils.PEMParseResult(pem_type=pem_type)
- self.assertEqual(pem_type, r.pem_type)
- self.assertEqual(pem_header, r.pem_header)
-
- pem_type = 'xxx'
- self.assertRaises(ValueError,
- pemutils.PEMParseResult, pem_type=pem_type)
-
- def test_pem_headers(self):
- for pem_header in pemutils.pem_headers:
- pem_type = pemutils.PEM_HEADER_TO_TYPE[pem_header]
- r = pemutils.PEMParseResult(pem_header=pem_header)
- self.assertEqual(pem_type, r.pem_type)
- self.assertEqual(pem_header, r.pem_header)
-
- pem_header = 'xxx'
- self.assertRaises(ValueError,
- pemutils.PEMParseResult, pem_header=pem_header)
-
-
-class TestPEMParse(tests.BaseTestCase):
- def test_parse_none(self):
- text = ''
- text += 'bla bla\n'
- text += 'yada yada yada\n'
- text += 'burfl blatz bingo\n'
-
- parse_results = pemutils.parse_pem(text)
- self.assertEqual(0, len(parse_results))
-
- self.assertEqual(False, pemutils.is_pem(text))
-
- def test_parse_invalid(self):
- p = PEM(pem_type='xxx',
- pem_header='XXX')
- text = p.pem_text
-
- self.assertRaises(ValueError,
- pemutils.parse_pem, text)
-
- def test_parse_one(self):
- data_size = 70
- count = len(headers)
- pems = []
-
- for i in moves.range(count):
- pems.append(PEM(pem_type=headers[i][0],
- pem_header=headers[i][1],
- data_size=data_size + i,
- data_offset=i))
-
- for i in moves.range(count):
- p = pems[i]
- text = p.pem_text
-
- parse_results = pemutils.parse_pem(text)
- self.assertEqual(1, len(parse_results))
-
- r = parse_results[0]
- self.assertEqual(p.pem_type, r.pem_type)
- self.assertEqual(p.pem_header, r.pem_header)
- self.assertEqual(p.pem_text,
- text[r.pem_start:r.pem_end])
- self.assertEqual(p.wrapped_base64,
- text[r.base64_start:r.base64_end])
- self.assertEqual(p.data, r.binary_data)
-
- def test_parse_one_embedded(self):
- p = PEM(data_offset=0)
- text = ''
- text += 'bla bla\n'
- text += 'yada yada yada\n'
- text += p.pem_text
- text += 'burfl blatz bingo\n'
-
- parse_results = pemutils.parse_pem(text)
- self.assertEqual(1, len(parse_results))
-
- r = parse_results[0]
- self.assertEqual(p.pem_type, r.pem_type)
- self.assertEqual(p.pem_header, r.pem_header)
- self.assertEqual(p.pem_text,
- text[r.pem_start:r.pem_end])
- self.assertEqual(p.wrapped_base64,
- text[r.base64_start: r.base64_end])
- self.assertEqual(p.data, r.binary_data)
-
- def test_parse_multple(self):
- data_size = 70
- count = len(headers)
- pems = []
- text = ''
-
- for i in moves.range(count):
- pems.append(PEM(pem_type=headers[i][0],
- pem_header=headers[i][1],
- data_size=data_size + i,
- data_offset=i))
-
- for i in moves.range(count):
- text += pems[i].pem_text
-
- parse_results = pemutils.parse_pem(text)
- self.assertEqual(count, len(parse_results))
-
- for i in moves.range(count):
- r = parse_results[i]
- p = pems[i]
-
- self.assertEqual(p.pem_type, r.pem_type)
- self.assertEqual(p.pem_header, r.pem_header)
- self.assertEqual(p.pem_text,
- text[r.pem_start:r.pem_end])
- self.assertEqual(p.wrapped_base64,
- text[r.base64_start: r.base64_end])
- self.assertEqual(p.data, r.binary_data)
-
- def test_parse_multple_find_specific(self):
- data_size = 70
- count = len(headers)
- pems = []
- text = ''
-
- for i in moves.range(count):
- pems.append(PEM(pem_type=headers[i][0],
- pem_header=headers[i][1],
- data_size=data_size + i,
- data_offset=i))
-
- for i in moves.range(count):
- text += pems[i].pem_text
-
- for i in moves.range(count):
- parse_results = pemutils.parse_pem(text, pem_type=headers[i][0])
- self.assertEqual(1, len(parse_results))
-
- r = parse_results[0]
- p = pems[i]
-
- self.assertEqual(p.pem_type, r.pem_type)
- self.assertEqual(p.pem_header, r.pem_header)
- self.assertEqual(p.pem_text,
- text[r.pem_start:r.pem_end])
- self.assertEqual(p.wrapped_base64,
- text[r.base64_start:r.base64_end])
- self.assertEqual(p.data, r.binary_data)
-
- def test_parse_multple_embedded(self):
- data_size = 75
- count = len(headers)
- pems = []
- text = ''
-
- for i in moves.range(count):
- pems.append(PEM(pem_type=headers[i][0],
- pem_header=headers[i][1],
- data_size=data_size + i,
- data_offset=i))
-
- for i in moves.range(count):
- text += 'bla bla\n'
- text += 'yada yada yada\n'
- text += pems[i].pem_text
- text += 'burfl blatz bingo\n'
-
- parse_results = pemutils.parse_pem(text)
- self.assertEqual(count, len(parse_results))
-
- for i in moves.range(count):
- r = parse_results[i]
- p = pems[i]
-
- self.assertEqual(p.pem_type, r.pem_type)
- self.assertEqual(p.pem_header, r.pem_header)
- self.assertEqual(p.pem_text,
- text[r.pem_start:r.pem_end])
- self.assertEqual(p.wrapped_base64,
- text[r.base64_start:r.base64_end])
- self.assertEqual(p.data, r.binary_data)
-
- def test_get_pem_data_none(self):
- text = ''
- text += 'bla bla\n'
- text += 'yada yada yada\n'
- text += 'burfl blatz bingo\n'
-
- data = pemutils.get_pem_data(text)
- self.assertIsNone(data)
-
- def test_get_pem_data_invalid(self):
- p = PEM(pem_type='xxx',
- pem_header='XXX')
- text = p.pem_text
-
- self.assertRaises(ValueError,
- pemutils.get_pem_data, text)
-
- def test_get_pem_data(self):
- data_size = 70
- count = len(headers)
- pems = []
-
- for i in moves.range(count):
- pems.append(PEM(pem_type=headers[i][0],
- pem_header=headers[i][1],
- data_size=data_size + i,
- data_offset=i))
-
- for i in moves.range(count):
- p = pems[i]
- text = p.pem_text
-
- data = pemutils.get_pem_data(text, p.pem_type)
- self.assertEqual(p.data, data)
-
- def test_is_pem(self):
- data_size = 70
- count = len(headers)
- pems = []
-
- for i in moves.range(count):
- pems.append(PEM(pem_type=headers[i][0],
- pem_header=headers[i][1],
- data_size=data_size + i,
- data_offset=i))
-
- for i in moves.range(count):
- p = pems[i]
- text = p.pem_text
- self.assertTrue(pemutils.is_pem(text, pem_type=p.pem_type))
- self.assertFalse(pemutils.is_pem(text,
- pem_type=p.pem_type + 'xxx'))
-
- def test_base64_to_pem(self):
- data_size = 70
- count = len(headers)
- pems = []
-
- for i in moves.range(count):
- pems.append(PEM(pem_type=headers[i][0],
- pem_header=headers[i][1],
- data_size=data_size + i,
- data_offset=i))
-
- for i in moves.range(count):
- p = pems[i]
- pem = pemutils.base64_to_pem(p.base64_text, p.pem_type)
- self.assertEqual(pemutils.get_pem_data(pem, p.pem_type), p.data)
-
- def test_binary_to_pem(self):
- data_size = 70
- count = len(headers)
- pems = []
-
- for i in moves.range(count):
- pems.append(PEM(pem_type=headers[i][0],
- pem_header=headers[i][1],
- data_size=data_size + i,
- data_offset=i))
-
- for i in moves.range(count):
- p = pems[i]
- pem = pemutils.binary_to_pem(p.data, p.pem_type)
- self.assertEqual(pemutils.get_pem_data(pem, p.pem_type), p.data)
diff --git a/keystone-moon/keystone/tests/unit/common/test_sql_core.py b/keystone-moon/keystone/tests/unit/common/test_sql_core.py
deleted file mode 100644
index 7d20eb03..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_sql_core.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from sqlalchemy.ext import declarative
-
-from keystone.common import sql
-from keystone.tests import unit
-from keystone.tests.unit import utils
-
-
-ModelBase = declarative.declarative_base()
-
-
-class TestModel(ModelBase, sql.ModelDictMixin):
- __tablename__ = 'testmodel'
- id = sql.Column(sql.String(64), primary_key=True)
- text = sql.Column(sql.String(64), nullable=False)
-
-
-class TestModelDictMixin(unit.BaseTestCase):
-
- def test_creating_a_model_instance_from_a_dict(self):
- d = {'id': utils.new_uuid(), 'text': utils.new_uuid()}
- m = TestModel.from_dict(d)
- self.assertEqual(d['id'], m.id)
- self.assertEqual(d['text'], m.text)
-
- def test_creating_a_dict_from_a_model_instance(self):
- m = TestModel(id=utils.new_uuid(), text=utils.new_uuid())
- d = m.to_dict()
- self.assertEqual(d['id'], m.id)
- self.assertEqual(d['text'], m.text)
-
- def test_creating_a_model_instance_from_an_invalid_dict(self):
- d = {'id': utils.new_uuid(), 'text': utils.new_uuid(), 'extra': None}
- self.assertRaises(TypeError, TestModel.from_dict, d)
-
- def test_creating_a_dict_from_a_model_instance_that_has_extra_attrs(self):
- expected = {'id': utils.new_uuid(), 'text': utils.new_uuid()}
- m = TestModel(id=expected['id'], text=expected['text'])
- m.extra = 'this should not be in the dictionary'
- self.assertEqual(expected, m.to_dict())
diff --git a/keystone-moon/keystone/tests/unit/common/test_utils.py b/keystone-moon/keystone/tests/unit/common/test_utils.py
deleted file mode 100644
index 3641aacd..00000000
--- a/keystone-moon/keystone/tests/unit/common/test_utils.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# encoding: utf-8
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import uuid
-
-from oslo_config import cfg
-from oslo_config import fixture as config_fixture
-from oslo_serialization import jsonutils
-import six
-
-from keystone.common import utils as common_utils
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import utils
-from keystone.version import service
-
-
-CONF = cfg.CONF
-
-TZ = utils.TZ
-
-
-class UtilsTestCase(unit.BaseTestCase):
- OPTIONAL = object()
-
- def setUp(self):
- super(UtilsTestCase, self).setUp()
- self.config_fixture = self.useFixture(config_fixture.Config(CONF))
-
- def test_resource_uuid(self):
- uuid_str = '536e28c2017e405e89b25a1ed777b952'
- self.assertEqual(uuid_str, common_utils.resource_uuid(uuid_str))
-
- # Exact 64 length string.
- uuid_str = ('536e28c2017e405e89b25a1ed777b952'
- 'f13de678ac714bb1b7d1e9a007c10db5')
- resource_id_namespace = common_utils.RESOURCE_ID_NAMESPACE
- transformed_id = uuid.uuid5(resource_id_namespace, uuid_str).hex
- self.assertEqual(transformed_id, common_utils.resource_uuid(uuid_str))
-
- # Non-ASCII character test.
- non_ascii_ = 'ß' * 32
- transformed_id = uuid.uuid5(resource_id_namespace, non_ascii_).hex
- self.assertEqual(transformed_id,
- common_utils.resource_uuid(non_ascii_))
-
- # This input is invalid because it's length is more than 64.
- invalid_input = 'x' * 65
- self.assertRaises(ValueError, common_utils.resource_uuid,
- invalid_input)
-
- # 64 length unicode string, to mimic what is returned from mapping_id
- # backend.
- uuid_str = six.text_type('536e28c2017e405e89b25a1ed777b952'
- 'f13de678ac714bb1b7d1e9a007c10db5')
- resource_id_namespace = common_utils.RESOURCE_ID_NAMESPACE
- if six.PY2:
- uuid_str = uuid_str.encode('utf-8')
- transformed_id = uuid.uuid5(resource_id_namespace, uuid_str).hex
- self.assertEqual(transformed_id, common_utils.resource_uuid(uuid_str))
-
- def test_hash(self):
- password = 'right'
- wrong = 'wrongwrong' # Two wrongs don't make a right
- hashed = common_utils.hash_password(password)
- self.assertTrue(common_utils.check_password(password, hashed))
- self.assertFalse(common_utils.check_password(wrong, hashed))
-
- def test_verify_normal_password_strict(self):
- self.config_fixture.config(strict_password_check=False)
- password = uuid.uuid4().hex
- verified = common_utils.verify_length_and_trunc_password(password)
- self.assertEqual(password, verified)
-
- def test_that_a_hash_can_not_be_validated_against_a_hash(self):
- # NOTE(dstanek): Bug 1279849 reported a problem where passwords
- # were not being hashed if they already looked like a hash. This
- # would allow someone to hash their password ahead of time
- # (potentially getting around password requirements, like
- # length) and then they could auth with their original password.
- password = uuid.uuid4().hex
- hashed_password = common_utils.hash_password(password)
- new_hashed_password = common_utils.hash_password(hashed_password)
- self.assertFalse(common_utils.check_password(password,
- new_hashed_password))
-
- def test_verify_long_password_strict(self):
- self.config_fixture.config(strict_password_check=False)
- self.config_fixture.config(group='identity', max_password_length=5)
- max_length = CONF.identity.max_password_length
- invalid_password = 'passw0rd'
- trunc = common_utils.verify_length_and_trunc_password(invalid_password)
- self.assertEqual(invalid_password[:max_length], trunc)
-
- def test_verify_long_password_strict_raises_exception(self):
- self.config_fixture.config(strict_password_check=True)
- self.config_fixture.config(group='identity', max_password_length=5)
- invalid_password = 'passw0rd'
- self.assertRaises(exception.PasswordVerificationError,
- common_utils.verify_length_and_trunc_password,
- invalid_password)
-
- def test_hash_long_password_truncation(self):
- self.config_fixture.config(strict_password_check=False)
- invalid_length_password = '0' * 9999999
- hashed = common_utils.hash_password(invalid_length_password)
- self.assertTrue(common_utils.check_password(invalid_length_password,
- hashed))
-
- def test_hash_long_password_strict(self):
- self.config_fixture.config(strict_password_check=True)
- invalid_length_password = '0' * 9999999
- self.assertRaises(exception.PasswordVerificationError,
- common_utils.hash_password,
- invalid_length_password)
-
- def _create_test_user(self, password=OPTIONAL):
- user = {"name": "hthtest"}
- if password is not self.OPTIONAL:
- user['password'] = password
-
- return user
-
- def test_hash_user_password_without_password(self):
- user = self._create_test_user()
- hashed = common_utils.hash_user_password(user)
- self.assertEqual(user, hashed)
-
- def test_hash_user_password_with_null_password(self):
- user = self._create_test_user(password=None)
- hashed = common_utils.hash_user_password(user)
- self.assertEqual(user, hashed)
-
- def test_hash_user_password_with_empty_password(self):
- password = ''
- user = self._create_test_user(password=password)
- user_hashed = common_utils.hash_user_password(user)
- password_hashed = user_hashed['password']
- self.assertTrue(common_utils.check_password(password, password_hashed))
-
- def test_hash_edge_cases(self):
- hashed = common_utils.hash_password('secret')
- self.assertFalse(common_utils.check_password('', hashed))
- self.assertFalse(common_utils.check_password(None, hashed))
-
- def test_hash_unicode(self):
- password = u'Comment \xe7a va'
- wrong = 'Comment ?a va'
- hashed = common_utils.hash_password(password)
- self.assertTrue(common_utils.check_password(password, hashed))
- self.assertFalse(common_utils.check_password(wrong, hashed))
-
- def test_auth_str_equal(self):
- self.assertTrue(common_utils.auth_str_equal('abc123', 'abc123'))
- self.assertFalse(common_utils.auth_str_equal('a', 'aaaaa'))
- self.assertFalse(common_utils.auth_str_equal('aaaaa', 'a'))
- self.assertFalse(common_utils.auth_str_equal('ABC123', 'abc123'))
-
- def test_unixtime(self):
- global TZ
-
- @utils.timezone
- def _test_unixtime():
- epoch = common_utils.unixtime(dt)
- self.assertEqual(epoch, epoch_ans, "TZ=%s" % TZ)
-
- dt = datetime.datetime(1970, 1, 2, 3, 4, 56, 0)
- epoch_ans = 56 + 4 * 60 + 3 * 3600 + 86400
- for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']:
- TZ = 'UTC' + d
- _test_unixtime()
-
- def test_pki_encoder(self):
- data = {'field': 'value'}
- json = jsonutils.dumps(data, cls=common_utils.PKIEncoder)
- expected_json = '{"field":"value"}'
- self.assertEqual(expected_json, json)
-
- def test_url_safe_check(self):
- base_str = 'i am safe'
- self.assertFalse(common_utils.is_not_url_safe(base_str))
- for i in common_utils.URL_RESERVED_CHARS:
- self.assertTrue(common_utils.is_not_url_safe(base_str + i))
-
- def test_url_safe_with_unicode_check(self):
- base_str = u'i am \xe7afe'
- self.assertFalse(common_utils.is_not_url_safe(base_str))
- for i in common_utils.URL_RESERVED_CHARS:
- self.assertTrue(common_utils.is_not_url_safe(base_str + i))
-
-
-class ServiceHelperTests(unit.BaseTestCase):
-
- @service.fail_gracefully
- def _do_test(self):
- raise Exception("Test Exc")
-
- def test_fail_gracefully(self):
- self.assertRaises(unit.UnexpectedExit, self._do_test)
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_db2.conf b/keystone-moon/keystone/tests/unit/config_files/backend_db2.conf
deleted file mode 100644
index 2bd0c1a6..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_db2.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-#Used for running the Migrate tests against a live DB2 Server
-#See _sql_livetest.py
-[database]
-connection = ibm_db_sa://keystone:keystone@/staktest?charset=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf
deleted file mode 100644
index 32161185..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf
+++ /dev/null
@@ -1,5 +0,0 @@
-[ldap]
-url = fake://memory
-user = cn=Admin
-password = password
-suffix = cn=example,cn=com
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf
deleted file mode 100644
index 36fa1ac9..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf
+++ /dev/null
@@ -1,41 +0,0 @@
-[ldap]
-url = fakepool://memory
-user = cn=Admin
-password = password
-backend_entities = ['Tenant', 'User', 'UserRoleAssociation', 'Role', 'Group', 'Domain']
-suffix = cn=example,cn=com
-
-# Connection pooling specific attributes
-
-# Enable LDAP connection pooling. (boolean value)
-use_pool=true
-
-# Connection pool size. (integer value)
-pool_size=5
-
-# Maximum count of reconnect trials. (integer value)
-pool_retry_max=2
-
-# Time span in seconds to wait between two reconnect trials.
-# (floating point value)
-pool_retry_delay=0.2
-
-# Connector timeout in seconds. Value -1 indicates indefinite
-# wait for response. (integer value)
-pool_connection_timeout=-1
-
-# Connection lifetime in seconds.
-# (integer value)
-pool_connection_lifetime=600
-
-# Enable LDAP connection pooling for end user authentication.
-# If use_pool is disabled, then this setting is meaningless
-# and is not used at all. (boolean value)
-use_auth_pool=true
-
-# End user auth connection pool size. (integer value)
-auth_pool_size=50
-
-# End user auth connection lifetime in seconds. (integer
-# value)
-auth_pool_connection_lifetime=60 \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
deleted file mode 100644
index 96a0ffa9..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
+++ /dev/null
@@ -1,14 +0,0 @@
-[database]
-#For a specific location file based SQLite use:
-#connection = sqlite:////tmp/keystone.db
-#To Test MySQL:
-#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8
-#To Test PostgreSQL:
-#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
-idle_timeout = 200
-
-[ldap]
-url = fake://memory
-user = cn=Admin
-password = password
-suffix = cn=example,cn=com
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf
deleted file mode 100644
index bb9ee08f..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf
+++ /dev/null
@@ -1,10 +0,0 @@
-[ldap]
-url = ldap://localhost
-user = cn=Manager,dc=openstack,dc=org
-password = test
-suffix = dc=openstack,dc=org
-group_tree_dn = ou=UserGroups,dc=openstack,dc=org
-user_tree_dn = ou=Users,dc=openstack,dc=org
-user_enabled_emulation = True
-user_mail_attribute = mail
-use_dumb_member = True
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf
deleted file mode 100644
index 5185770b..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf
+++ /dev/null
@@ -1,9 +0,0 @@
-[database]
-connection = sqlite://
-#For a file based sqlite use
-#connection = sqlite:////tmp/keystone.db
-#To Test MySQL:
-#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8
-#To Test PostgreSQL:
-#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
-idle_timeout = 200
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
deleted file mode 100644
index 2495f036..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-#Used for running the Migrate tests against a live MySQL Server
-#See _sql_livetest.py
-[database]
-connection = mysql+pymysql://keystone:keystone@localhost/keystone_test?charset=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf
deleted file mode 100644
index c36e05f9..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf
+++ /dev/null
@@ -1,32 +0,0 @@
-[ldap]
-url = ldap://localhost
-user = cn=Manager,dc=openstack,dc=org
-password = test
-suffix = dc=openstack,dc=org
-group_tree_dn = ou=UserGroups,dc=openstack,dc=org
-user_tree_dn = ou=Users,dc=openstack,dc=org
-user_enabled_emulation = True
-user_mail_attribute = mail
-use_dumb_member = True
-
-# Connection pooling specific attributes
-
-# Enable LDAP connection pooling. (boolean value)
-use_pool=true
-# Connection pool size. (integer value)
-pool_size=5
-# Connection lifetime in seconds.
-# (integer value)
-pool_connection_lifetime=60
-
-# Enable LDAP connection pooling for end user authentication.
-# If use_pool is disabled, then this setting is meaningless
-# and is not used at all. (boolean value)
-use_auth_pool=true
-
-# End user auth connection pool size. (integer value)
-auth_pool_size=50
-
-# End user auth connection lifetime in seconds. (integer
-# value)
-auth_pool_connection_lifetime=300 \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf
deleted file mode 100644
index 001805df..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-#Used for running the Migrate tests against a live Postgresql Server
-#See _sql_livetest.py
-[database]
-connection = postgresql://keystone:keystone@localhost/keystone_test?client_encoding=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
deleted file mode 100644
index f2828e2e..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-[database]
-#For a specific location file based SQLite use:
-#connection = sqlite:////tmp/keystone.db
-#To Test MySQL:
-#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8
-#To Test PostgreSQL:
-#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8
-idle_timeout = 200
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf
deleted file mode 100644
index b66044b7..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf
+++ /dev/null
@@ -1,14 +0,0 @@
-[ldap]
-url = ldap://
-user = dc=Manager,dc=openstack,dc=org
-password = test
-suffix = dc=openstack,dc=org
-group_tree_dn = ou=UserGroups,dc=openstack,dc=org
-user_tree_dn = ou=Users,dc=openstack,dc=org
-user_enabled_emulation = True
-user_mail_attribute = mail
-use_dumb_member = True
-use_tls = True
-tls_cacertfile = /etc/keystone/ssl/certs/cacert.pem
-tls_cacertdir = /etc/keystone/ssl/certs/
-tls_req_cert = demand
diff --git a/keystone-moon/keystone/tests/unit/config_files/deprecated.conf b/keystone-moon/keystone/tests/unit/config_files/deprecated.conf
deleted file mode 100644
index 515e663a..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/deprecated.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-# Options in this file are deprecated. See test_config.
-
-[sql]
-# These options were deprecated in Icehouse with the switch to oslo's
-# db.sqlalchemy.
-
-connection = sqlite://deprecated
-idle_timeout = 54321
diff --git a/keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf b/keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf
deleted file mode 100644
index 1d1c926f..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf
+++ /dev/null
@@ -1,15 +0,0 @@
-# Options in this file are deprecated. See test_config.
-
-[sql]
-# These options were deprecated in Icehouse with the switch to oslo's
-# db.sqlalchemy.
-
-connection = sqlite://deprecated
-idle_timeout = 54321
-
-
-[database]
-# These are the new options from the [sql] section.
-
-connection = sqlite://new
-idle_timeout = 65432
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf
deleted file mode 100644
index fecc7bea..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf
+++ /dev/null
@@ -1,5 +0,0 @@
-# The domain-specific configuration file for the test domain
-# 'domain1' for use with unit tests.
-
-[identity]
-driver = sql \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
deleted file mode 100644
index 64d01d48..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
+++ /dev/null
@@ -1,14 +0,0 @@
-# The domain-specific configuration file for the default domain for
-# use with unit tests.
-#
-# The domain_name of the default domain is 'Default', hence the
-# strange mix of upper/lower case in the file name.
-
-[ldap]
-url = fake://memory
-user = cn=Admin
-password = password
-suffix = cn=example,cn=com
-
-[identity]
-driver = ldap
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
deleted file mode 100644
index af540537..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
+++ /dev/null
@@ -1,12 +0,0 @@
-# The domain-specific configuration file for the test domain
-# 'domain1' for use with unit tests.
-
-[ldap]
-url = fake://memory1
-user = cn=Admin
-password = password
-suffix = cn=example,cn=com
-
-[identity]
-driver = ldap
-list_limit = 101
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf
deleted file mode 100644
index a14179e3..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf
+++ /dev/null
@@ -1,13 +0,0 @@
-# The domain-specific configuration file for the test domain
-# 'domain2' for use with unit tests.
-
-[ldap]
-url = fake://memory
-user = cn=Admin
-password = password
-suffix = cn=myroot,cn=com
-group_tree_dn = ou=UserGroups,dc=myroot,dc=org
-user_tree_dn = ou=Users,dc=myroot,dc=org
-
-[identity]
-driver = ldap \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf
deleted file mode 100644
index 925b26f2..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf
+++ /dev/null
@@ -1,5 +0,0 @@
-# The domain-specific configuration file for the test domain
-# 'domain2' for use with unit tests.
-
-[identity]
-driver = sql \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf
deleted file mode 100644
index 2dd86c25..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf
+++ /dev/null
@@ -1,14 +0,0 @@
-# The domain-specific configuration file for the default domain for
-# use with unit tests.
-#
-# The domain_name of the default domain is 'Default', hence the
-# strange mix of upper/lower case in the file name.
-
-[ldap]
-url = fake://memory
-user = cn=Admin
-password = password
-suffix = cn=example,cn=com
-
-[identity]
-driver = ldap \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf
deleted file mode 100644
index fecc7bea..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf
+++ /dev/null
@@ -1,5 +0,0 @@
-# The domain-specific configuration file for the test domain
-# 'domain1' for use with unit tests.
-
-[identity]
-driver = sql \ No newline at end of file
diff --git a/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf b/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf
deleted file mode 100644
index 4a9e87d5..00000000
--- a/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[auth]
-methods = external,password,token,simple_challenge_response,saml2,openid,x509
-simple_challenge_response = keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse
-
diff --git a/keystone-moon/keystone/tests/unit/contrib/__init__.py b/keystone-moon/keystone/tests/unit/contrib/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/contrib/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/contrib/federation/__init__.py b/keystone-moon/keystone/tests/unit/contrib/federation/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/contrib/federation/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py b/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py
deleted file mode 100644
index 52a6095b..00000000
--- a/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py
+++ /dev/null
@@ -1,725 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from oslo_config import cfg
-from oslo_config import fixture as config_fixture
-from oslo_serialization import jsonutils
-
-from keystone.auth.plugins import mapped
-from keystone import exception
-from keystone.federation import utils as mapping_utils
-from keystone.tests import unit
-from keystone.tests.unit import mapping_fixtures
-
-
-FAKE_MAPPING_ID = uuid.uuid4().hex
-
-
-class MappingRuleEngineTests(unit.BaseTestCase):
- """A class for testing the mapping rule engine."""
-
- def assertValidMappedUserObject(self, mapped_properties,
- user_type='ephemeral',
- domain_id=None):
- """Check whether mapped properties object has 'user' within.
-
- According to today's rules, RuleProcessor does not have to issue user's
- id or name. What's actually required is user's type and for ephemeral
- users that would be service domain named 'Federated'.
- """
- self.assertIn('user', mapped_properties,
- message='Missing user object in mapped properties')
- user = mapped_properties['user']
- self.assertIn('type', user)
- self.assertEqual(user_type, user['type'])
- self.assertIn('domain', user)
- domain = user['domain']
- domain_name_or_id = domain.get('id') or domain.get('name')
- domain_ref = domain_id or 'Federated'
- self.assertEqual(domain_ref, domain_name_or_id)
-
- def test_rule_engine_any_one_of_and_direct_mapping(self):
- """Should return user's name and group id EMPLOYEE_GROUP_ID.
-
- The ADMIN_ASSERTION should successfully have a match in MAPPING_LARGE.
- They will test the case where `any_one_of` is valid, and there is
- a direct mapping for the users name.
-
- """
- mapping = mapping_fixtures.MAPPING_LARGE
- assertion = mapping_fixtures.ADMIN_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- values = rp.process(assertion)
-
- fn = assertion.get('FirstName')
- ln = assertion.get('LastName')
- full_name = '%s %s' % (fn, ln)
- group_ids = values.get('group_ids')
- user_name = values.get('user', {}).get('name')
-
- self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
- self.assertEqual(full_name, user_name)
-
- def test_rule_engine_no_regex_match(self):
- """Should deny authorization, the email of the tester won't match.
-
- This will not match since the email in the assertion will fail
- the regex test. It is set to match any @example.com address.
- But the incoming value is set to eviltester@example.org.
- RuleProcessor should raise ValidationError.
-
- """
- mapping = mapping_fixtures.MAPPING_LARGE
- assertion = mapping_fixtures.BAD_TESTER_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- self.assertRaises(exception.ValidationError,
- rp.process,
- assertion)
-
- def test_rule_engine_regex_many_groups(self):
- """Should return group CONTRACTOR_GROUP_ID.
-
- The TESTER_ASSERTION should successfully have a match in
- MAPPING_TESTER_REGEX. This will test the case where many groups
- are in the assertion, and a regex value is used to try and find
- a match.
-
- """
- mapping = mapping_fixtures.MAPPING_TESTER_REGEX
- assertion = mapping_fixtures.TESTER_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
-
- def test_rule_engine_any_one_of_many_rules(self):
- """Should return group CONTRACTOR_GROUP_ID.
-
- The CONTRACTOR_ASSERTION should successfully have a match in
- MAPPING_SMALL. This will test the case where many rules
- must be matched, including an `any_one_of`, and a direct
- mapping.
-
- """
- mapping = mapping_fixtures.MAPPING_SMALL
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.CONTRACTOR_GROUP_ID, group_ids)
-
- def test_rule_engine_not_any_of_and_direct_mapping(self):
- """Should return user's name and email.
-
- The CUSTOMER_ASSERTION should successfully have a match in
- MAPPING_LARGE. This will test the case where a requirement
- has `not_any_of`, and direct mapping to a username, no group.
-
- """
- mapping = mapping_fixtures.MAPPING_LARGE
- assertion = mapping_fixtures.CUSTOMER_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertEqual([], group_ids,)
-
- def test_rule_engine_not_any_of_many_rules(self):
- """Should return group EMPLOYEE_GROUP_ID.
-
- The EMPLOYEE_ASSERTION should successfully have a match in
- MAPPING_SMALL. This will test the case where many remote
- rules must be matched, including a `not_any_of`.
-
- """
- mapping = mapping_fixtures.MAPPING_SMALL
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids)
-
- def test_rule_engine_not_any_of_regex_verify_pass(self):
- """Should return group DEVELOPER_GROUP_ID.
-
- The DEVELOPER_ASSERTION should successfully have a match in
- MAPPING_DEVELOPER_REGEX. This will test the case where many
- remote rules must be matched, including a `not_any_of`, with
- regex set to True.
-
- """
- mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
- assertion = mapping_fixtures.DEVELOPER_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- values = rp.process(assertion)
-
- self.assertValidMappedUserObject(values)
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
-
- def test_rule_engine_not_any_of_regex_verify_fail(self):
- """Should deny authorization.
-
- The email in the assertion will fail the regex test.
- It is set to reject any @example.org address, but the
- incoming value is set to evildeveloper@example.org.
- RuleProcessor should yield ValidationError.
-
- """
- mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
- assertion = mapping_fixtures.BAD_DEVELOPER_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- self.assertRaises(exception.ValidationError,
- rp.process,
- assertion)
-
- def _rule_engine_regex_match_and_many_groups(self, assertion):
- """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
-
- A helper function injecting assertion passed as an argument.
- Expect DEVELOPER_GROUP_ID and TESTER_GROUP_ID in the results.
-
- """
- mapping = mapping_fixtures.MAPPING_LARGE
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- values = rp.process(assertion)
-
- user_name = assertion.get('UserName')
- group_ids = values.get('group_ids')
- name = values.get('user', {}).get('name')
-
- self.assertValidMappedUserObject(values)
- self.assertEqual(user_name, name)
- self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids)
- self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids)
-
- def test_rule_engine_regex_match_and_many_groups(self):
- """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
-
- The TESTER_ASSERTION should successfully have a match in
- MAPPING_LARGE. This will test a successful regex match
- for an `any_one_of` evaluation type, and will have many
- groups returned.
-
- """
- self._rule_engine_regex_match_and_many_groups(
- mapping_fixtures.TESTER_ASSERTION)
-
- def test_rule_engine_discards_nonstring_objects(self):
- """Check whether RuleProcessor discards non string objects.
-
- Despite the fact that assertion is malformed and contains
- non string objects, RuleProcessor should correctly discard them and
- successfully have a match in MAPPING_LARGE.
-
- """
- self._rule_engine_regex_match_and_many_groups(
- mapping_fixtures.MALFORMED_TESTER_ASSERTION)
-
- def test_rule_engine_fails_after_discarding_nonstring(self):
- """Check whether RuleProcessor discards non string objects.
-
- Expect RuleProcessor to discard non string object, which
- is required for a correct rule match. RuleProcessor will result with
- ValidationError.
-
- """
- mapping = mapping_fixtures.MAPPING_SMALL
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_MALFORMED_ASSERTION
- self.assertRaises(exception.ValidationError,
- rp.process,
- assertion)
-
- def test_using_remote_direct_mapping_that_doesnt_exist_fails(self):
- """Test for the correct error when referring to a bad remote match.
-
- The remote match must exist in a rule when a local section refers to
- a remote matching using the format (e.g. {0} in a local section).
- """
- mapping = mapping_fixtures.MAPPING_DIRECT_MAPPING_THROUGH_KEYWORD
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.CUSTOMER_ASSERTION
-
- self.assertRaises(exception.DirectMappingError,
- rp.process,
- assertion)
-
- def test_rule_engine_returns_group_names(self):
- """Check whether RuleProcessor returns group names with their domains.
-
- RuleProcessor should return 'group_names' entry with a list of
- dictionaries with two entries 'name' and 'domain' identifying group by
- its name and domain.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUP_NAMES
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- reference = {
- mapping_fixtures.DEVELOPER_GROUP_NAME:
- {
- "name": mapping_fixtures.DEVELOPER_GROUP_NAME,
- "domain": {
- "name": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_NAME
- }
- },
- mapping_fixtures.TESTER_GROUP_NAME:
- {
- "name": mapping_fixtures.TESTER_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- for rule in mapped_properties['group_names']:
- self.assertDictEqual(reference.get(rule.get('name')), rule)
-
- def test_rule_engine_whitelist_and_direct_groups_mapping(self):
- """Should return user's groups Developer and Contractor.
-
- The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
- in MAPPING_GROUPS_WHITELIST. It will test the case where 'whitelist'
- correctly filters out Manager and only allows Developer and Contractor.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
-
- reference = {
- mapping_fixtures.DEVELOPER_GROUP_NAME:
- {
- "name": mapping_fixtures.DEVELOPER_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- },
- mapping_fixtures.CONTRACTOR_GROUP_NAME:
- {
- "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- for rule in mapped_properties['group_names']:
- self.assertDictEqual(reference.get(rule.get('name')), rule)
-
- self.assertEqual('tbo', mapped_properties['user']['name'])
- self.assertEqual([], mapped_properties['group_ids'])
-
- def test_rule_engine_blacklist_and_direct_groups_mapping(self):
- """Should return user's group Developer.
-
- The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match
- in MAPPING_GROUPS_BLACKLIST. It will test the case where 'blacklist'
- correctly filters out Manager and Developer and only allows Contractor.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
-
- reference = {
- mapping_fixtures.CONTRACTOR_GROUP_NAME:
- {
- "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- for rule in mapped_properties['group_names']:
- self.assertDictEqual(reference.get(rule.get('name')), rule)
- self.assertEqual('tbo', mapped_properties['user']['name'])
- self.assertEqual([], mapped_properties['group_ids'])
-
- def test_rule_engine_blacklist_and_direct_groups_mapping_multiples(self):
- """Tests matching multiple values before the blacklist.
-
- Verifies that the local indexes are correct when matching multiple
- remote values for a field when the field occurs before the blacklist
- entry in the remote rules.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MULTIPLES
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
-
- reference = {
- mapping_fixtures.CONTRACTOR_GROUP_NAME:
- {
- "name": mapping_fixtures.CONTRACTOR_GROUP_NAME,
- "domain": {
- "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- for rule in mapped_properties['group_names']:
- self.assertDictEqual(reference.get(rule.get('name')), rule)
- self.assertEqual('tbo', mapped_properties['user']['name'])
- self.assertEqual([], mapped_properties['group_ids'])
-
- def test_rule_engine_whitelist_direct_group_mapping_missing_domain(self):
- """Test if the local rule is rejected upon missing domain value
-
- This is a variation with a ``whitelist`` filter.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- self.assertRaises(exception.ValidationError, rp.process, assertion)
-
- def test_rule_engine_blacklist_direct_group_mapping_missing_domain(self):
- """Test if the local rule is rejected upon missing domain value
-
- This is a variation with a ``blacklist`` filter.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- self.assertRaises(exception.ValidationError, rp.process, assertion)
-
- def test_rule_engine_no_groups_allowed(self):
- """Should return user mapped to no groups.
-
- The EMPLOYEE_ASSERTION should successfully have a match
- in MAPPING_GROUPS_WHITELIST, but 'whitelist' should filter out
- the group values from the assertion and thus map to no groups.
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertListEqual(mapped_properties['group_names'], [])
- self.assertListEqual(mapped_properties['group_ids'], [])
- self.assertEqual('tbo', mapped_properties['user']['name'])
-
- def test_mapping_federated_domain_specified(self):
- """Test mapping engine when domain 'ephemeral' is explicitly set.
-
- For that, we use mapping rule MAPPING_EPHEMERAL_USER and assertion
- EMPLOYEE_ASSERTION
-
- """
- mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
-
- def test_set_ephemeral_domain_to_ephemeral_users(self):
- """Test auto assigning service domain to ephemeral users.
-
- Test that ephemeral users will always become members of federated
- service domain. The check depends on ``type`` value which must be set
- to ``ephemeral`` in case of ephemeral user.
-
- """
- mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
-
- def test_local_user_local_domain(self):
- """Test that local users can have non-service domains assigned."""
- mapping = mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(
- mapped_properties, user_type='local',
- domain_id=mapping_fixtures.LOCAL_DOMAIN)
-
- def test_user_identifications_name(self):
- """Test varius mapping options and how users are identified.
-
- This test calls mapped.setup_username() for propagating user object.
-
- Test plan:
- - Check if the user has proper domain ('federated') set
- - Check if the user has property type set ('ephemeral')
- - Check if user's name is properly mapped from the assertion
- - Check if unique_id is properly set and equal to display_name,
- as it was not explicitly specified in the mapping.
-
- """
- mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- self.assertEqual('jsmith', mapped_properties['user']['name'])
- unique_id, display_name = mapped.get_user_unique_id_and_display_name(
- {}, mapped_properties)
- self.assertEqual('jsmith', unique_id)
- self.assertEqual('jsmith', display_name)
-
- def test_user_identifications_name_and_federated_domain(self):
- """Test varius mapping options and how users are identified.
-
- This test calls mapped.setup_username() for propagating user object.
-
- Test plan:
- - Check if the user has proper domain ('federated') set
- - Check if the user has propert type set ('ephemeral')
- - Check if user's name is properly mapped from the assertion
- - Check if the unique_id and display_name are properly set
-
- """
- mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- unique_id, display_name = mapped.get_user_unique_id_and_display_name(
- {}, mapped_properties)
- self.assertEqual('tbo', display_name)
- self.assertEqual('abc123%40example.com', unique_id)
-
- def test_user_identification_id(self):
- """Test varius mapping options and how users are identified.
-
- This test calls mapped.setup_username() for propagating user object.
-
- Test plan:
- - Check if the user has proper domain ('federated') set
- - Check if the user has propert type set ('ephemeral')
- - Check if user's display_name is properly set and equal to unique_id,
- as it was not explicitly specified in the mapping.
-
- """
- mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.ADMIN_ASSERTION
- mapped_properties = rp.process(assertion)
- context = {'environment': {}}
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- unique_id, display_name = mapped.get_user_unique_id_and_display_name(
- context, mapped_properties)
- self.assertEqual('bob', unique_id)
- self.assertEqual('bob', display_name)
-
- def test_user_identification_id_and_name(self):
- """Test varius mapping options and how users are identified.
-
- This test calls mapped.setup_username() for propagating user object.
-
- Test plan:
- - Check if the user has proper domain ('federated') set
- - Check if the user has proper type set ('ephemeral')
- - Check if display_name is properly set from the assertion
- - Check if unique_id is properly set and and equal to value hardcoded
- in the mapping
-
- This test does two iterations with different assertions used as input
- for the Mapping Engine. Different assertions will be matched with
- different rules in the ruleset, effectively issuing different user_id
- (hardcoded values). In the first iteration, the hardcoded user_id is
- not url-safe and we expect Keystone to make it url safe. In the latter
- iteration, provided user_id is already url-safe and we expect server
- not to change it.
-
- """
- testcases = [(mapping_fixtures.CUSTOMER_ASSERTION, 'bwilliams'),
- (mapping_fixtures.EMPLOYEE_ASSERTION, 'tbo')]
- for assertion, exp_user_name in testcases:
- mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- mapped_properties = rp.process(assertion)
- context = {'environment': {}}
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
- unique_id, display_name = (
- mapped.get_user_unique_id_and_display_name(context,
- mapped_properties)
- )
- self.assertEqual(exp_user_name, display_name)
- self.assertEqual('abc123%40example.com', unique_id)
-
- def test_whitelist_pass_through(self):
- mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_PASS_THROUGH
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = mapping_fixtures.DEVELOPER_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertValidMappedUserObject(mapped_properties)
-
- self.assertEqual('developacct', mapped_properties['user']['name'])
- self.assertEqual('Developer',
- mapped_properties['group_names'][0]['name'])
-
- def test_mapping_with_incorrect_local_keys(self):
- mapping = mapping_fixtures.MAPPING_BAD_LOCAL_SETUP
- self.assertRaises(exception.ValidationError,
- mapping_utils.validate_mapping_structure,
- mapping)
-
- def test_mapping_with_group_name_and_domain(self):
- mapping = mapping_fixtures.MAPPING_GROUP_NAMES
- mapping_utils.validate_mapping_structure(mapping)
-
- def test_type_not_in_assertion(self):
- """Test that if the remote "type" is not in the assertion it fails."""
- mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_PASS_THROUGH
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- assertion = {uuid.uuid4().hex: uuid.uuid4().hex}
- self.assertRaises(exception.ValidationError,
- rp.process,
- assertion)
-
- def test_rule_engine_group_ids_mapping_whitelist(self):
- """Test mapping engine when group_ids is explicitly set
-
- Also test whitelists on group ids
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_IDS_WHITELIST
- assertion = mapping_fixtures.GROUP_IDS_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertEqual('opilotte', mapped_properties['user']['name'])
- self.assertListEqual([], mapped_properties['group_names'])
- self.assertItemsEqual(['abc123', 'ghi789', 'klm012'],
- mapped_properties['group_ids'])
-
- def test_rule_engine_group_ids_mapping_blacklist(self):
- """Test mapping engine when group_ids is explicitly set.
-
- Also test blacklists on group ids
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_IDS_BLACKLIST
- assertion = mapping_fixtures.GROUP_IDS_ASSERTION
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertEqual('opilotte', mapped_properties['user']['name'])
- self.assertListEqual([], mapped_properties['group_names'])
- self.assertItemsEqual(['abc123', 'ghi789', 'klm012'],
- mapped_properties['group_ids'])
-
- def test_rule_engine_group_ids_mapping_only_one_group(self):
- """Test mapping engine when group_ids is explicitly set.
-
- If the group ids list has only one group,
- test if the transformation is done correctly
-
- """
- mapping = mapping_fixtures.MAPPING_GROUPS_IDS_WHITELIST
- assertion = mapping_fixtures.GROUP_IDS_ASSERTION_ONLY_ONE_GROUP
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertEqual('opilotte', mapped_properties['user']['name'])
- self.assertListEqual([], mapped_properties['group_names'])
- self.assertItemsEqual(['210mlk', '321cba'],
- mapped_properties['group_ids'])
-
-
-class TestUnicodeAssertionData(unit.BaseTestCase):
- """Ensure that unicode data in the assertion headers works.
-
- Bug #1525250 reported that something was not getting correctly encoded
- and/or decoded when assertion data contained non-ASCII characters.
-
- This test class mimics what happens in a real HTTP request.
- """
-
- def setUp(self):
- super(TestUnicodeAssertionData, self).setUp()
- self.config_fixture = self.useFixture(config_fixture.Config(cfg.CONF))
- self.config_fixture.config(group='federation',
- assertion_prefix='PFX')
-
- def _pull_mapping_rules_from_the_database(self):
- # NOTE(dstanek): In a live system. The rules are dumped into JSON bytes
- # before being # stored in the database. Upon retrieval the bytes are
- # loaded and the resulting dictionary is full of unicode text strings.
- # Most of tests in this file incorrectly assume the mapping fixture
- # dictionary is the same as what it would look like coming out of the
- # database. The string, when coming out of the database, are all text.
- return jsonutils.loads(jsonutils.dumps(
- mapping_fixtures.MAPPING_UNICODE))
-
- def _pull_assertion_from_the_request_headers(self):
- # NOTE(dstanek): In a live system the bytes for the assertion are
- # pulled from the HTTP headers. These bytes may be decodable as
- # ISO-8859-1 according to Section 3.2.4 of RFC 7230. Let's assume
- # that our web server plugins are correctly encoding the data.
- context = dict(environment=mapping_fixtures.UNICODE_NAME_ASSERTION)
- data = mapping_utils.get_assertion_params_from_env(context)
- # NOTE(dstanek): keystone.auth.plugins.mapped
- return dict(data)
-
- def test_unicode(self):
- mapping = self._pull_mapping_rules_from_the_database()
- assertion = self._pull_assertion_from_the_request_headers()
-
- rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
- values = rp.process(assertion)
-
- fn = assertion.get('PFX_FirstName')
- ln = assertion.get('PFX_LastName')
- full_name = '%s %s' % (fn, ln)
- user_name = values.get('user', {}).get('name')
- self.assertEqual(full_name, user_name)
diff --git a/keystone-moon/keystone/tests/unit/core.py b/keystone-moon/keystone/tests/unit/core.py
deleted file mode 100644
index 1054e131..00000000
--- a/keystone-moon/keystone/tests/unit/core.py
+++ /dev/null
@@ -1,907 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from __future__ import absolute_import
-import atexit
-import base64
-import datetime
-import functools
-import hashlib
-import json
-import logging
-import os
-import re
-import shutil
-import socket
-import sys
-import uuid
-import warnings
-
-import fixtures
-from oslo_config import cfg
-from oslo_config import fixture as config_fixture
-from oslo_context import context as oslo_context
-from oslo_context import fixture as oslo_ctx_fixture
-from oslo_log import fixture as log_fixture
-from oslo_log import log
-from oslo_utils import timeutils
-from oslotest import mockpatch
-from paste.deploy import loadwsgi
-import six
-from sqlalchemy import exc
-import testtools
-from testtools import testcase
-
-# NOTE(ayoung)
-# environment.use_eventlet must run before any of the code that will
-# call the eventlet monkeypatching.
-from keystone.common import environment # noqa
-environment.use_eventlet()
-
-from keystone import auth
-from keystone.common import config
-from keystone.common import dependency
-from keystone.common.kvs import core as kvs_core
-from keystone.common import sql
-from keystone import exception
-from keystone import notifications
-from keystone.server import common
-from keystone.tests.unit import ksfixtures
-from keystone.version import controllers
-from keystone.version import service
-
-
-config.configure()
-
-PID = six.text_type(os.getpid())
-TESTSDIR = os.path.dirname(os.path.abspath(__file__))
-TESTCONF = os.path.join(TESTSDIR, 'config_files')
-ROOTDIR = os.path.normpath(os.path.join(TESTSDIR, '..', '..', '..'))
-VENDOR = os.path.join(ROOTDIR, 'vendor')
-ETCDIR = os.path.join(ROOTDIR, 'etc')
-
-
-def _calc_tmpdir():
- env_val = os.environ.get('KEYSTONE_TEST_TEMP_DIR')
- if not env_val:
- return os.path.join(TESTSDIR, 'tmp', PID)
- return os.path.join(env_val, PID)
-
-
-TMPDIR = _calc_tmpdir()
-
-CONF = cfg.CONF
-log.register_options(CONF)
-
-IN_MEM_DB_CONN_STRING = 'sqlite://'
-
-TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
-
-exception._FATAL_EXCEPTION_FORMAT_ERRORS = True
-os.makedirs(TMPDIR)
-atexit.register(shutil.rmtree, TMPDIR)
-
-
-class dirs(object):
- @staticmethod
- def root(*p):
- return os.path.join(ROOTDIR, *p)
-
- @staticmethod
- def etc(*p):
- return os.path.join(ETCDIR, *p)
-
- @staticmethod
- def tests(*p):
- return os.path.join(TESTSDIR, *p)
-
- @staticmethod
- def tmp(*p):
- return os.path.join(TMPDIR, *p)
-
- @staticmethod
- def tests_conf(*p):
- return os.path.join(TESTCONF, *p)
-
-
-# keystone.common.sql.initialize() for testing.
-DEFAULT_TEST_DB_FILE = dirs.tmp('test.db')
-
-
-class EggLoader(loadwsgi.EggLoader):
- _basket = {}
-
- def find_egg_entry_point(self, object_type, name=None):
- egg_key = '%s:%s' % (object_type, name)
- egg_ep = self._basket.get(egg_key)
- if not egg_ep:
- egg_ep = super(EggLoader, self).find_egg_entry_point(
- object_type, name=name)
- self._basket[egg_key] = egg_ep
- return egg_ep
-
-
-# NOTE(dstanek): class paths were remove from the keystone-paste.ini in
-# favor of using entry points. This caused tests to slow to a crawl
-# since we reload the application object for each RESTful test. This
-# monkey-patching adds caching to paste deploy's egg lookup.
-loadwsgi.EggLoader = EggLoader
-
-
-@atexit.register
-def remove_test_databases():
- db = dirs.tmp('test.db')
- if os.path.exists(db):
- os.unlink(db)
- pristine = dirs.tmp('test.db.pristine')
- if os.path.exists(pristine):
- os.unlink(pristine)
-
-
-def generate_paste_config(extension_name):
- # Generate a file, based on keystone-paste.ini, that is named:
- # extension_name.ini, and includes extension_name in the pipeline
- with open(dirs.etc('keystone-paste.ini'), 'r') as f:
- contents = f.read()
-
- new_contents = contents.replace(' service_v3',
- ' %s service_v3' % (extension_name))
-
- new_paste_file = dirs.tmp(extension_name + '.ini')
- with open(new_paste_file, 'w') as f:
- f.write(new_contents)
-
- return new_paste_file
-
-
-def remove_generated_paste_config(extension_name):
- # Remove the generated paste config file, named extension_name.ini
- paste_file_to_remove = dirs.tmp(extension_name + '.ini')
- os.remove(paste_file_to_remove)
-
-
-def skip_if_cache_disabled(*sections):
- """This decorator is used to skip a test if caching is disabled.
-
- Caching can be disabled either globally or for a specific section.
-
- In the code fragment::
-
- @skip_if_cache_is_disabled('assignment', 'token')
- def test_method(*args):
- ...
-
- The method test_method would be skipped if caching is disabled globally via
- the `enabled` option in the `cache` section of the configuration or if
- the `caching` option is set to false in either `assignment` or `token`
- sections of the configuration. This decorator can be used with no
- arguments to only check global caching.
-
- If a specified configuration section does not define the `caching` option,
- this decorator makes the same assumption as the `should_cache_fn` in
- keystone.common.cache that caching should be enabled.
-
- """
- def wrapper(f):
- @functools.wraps(f)
- def inner(*args, **kwargs):
- if not CONF.cache.enabled:
- raise testcase.TestSkipped('Cache globally disabled.')
- for s in sections:
- conf_sec = getattr(CONF, s, None)
- if conf_sec is not None:
- if not getattr(conf_sec, 'caching', True):
- raise testcase.TestSkipped('%s caching disabled.' % s)
- return f(*args, **kwargs)
- return inner
- return wrapper
-
-
-def skip_if_cache_is_enabled(*sections):
- def wrapper(f):
- @functools.wraps(f)
- def inner(*args, **kwargs):
- if CONF.cache.enabled:
- for s in sections:
- conf_sec = getattr(CONF, s, None)
- if conf_sec is not None:
- if getattr(conf_sec, 'caching', True):
- raise testcase.TestSkipped('%s caching enabled.' %
- s)
- return f(*args, **kwargs)
- return inner
- return wrapper
-
-
-def skip_if_no_multiple_domains_support(f):
- """Decorator to skip tests for identity drivers limited to one domain."""
- @functools.wraps(f)
- def wrapper(*args, **kwargs):
- test_obj = args[0]
- if not test_obj.identity_api.multiple_domains_supported:
- raise testcase.TestSkipped('No multiple domains support')
- return f(*args, **kwargs)
- return wrapper
-
-
-class UnexpectedExit(Exception):
- pass
-
-
-def new_region_ref(parent_region_id=None, **kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'parent_region_id': parent_region_id}
-
- ref.update(kwargs)
- return ref
-
-
-def new_service_ref(**kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True,
- 'type': uuid.uuid4().hex,
- }
- ref.update(kwargs)
- return ref
-
-
-NEEDS_REGION_ID = object()
-
-
-def new_endpoint_ref(service_id, interface='public',
- region_id=NEEDS_REGION_ID, **kwargs):
-
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'interface': interface,
- 'service_id': service_id,
- 'url': 'https://' + uuid.uuid4().hex + '.com',
- }
-
- if region_id is NEEDS_REGION_ID:
- ref['region_id'] = uuid.uuid4().hex
- elif region_id is None and kwargs.get('region') is not None:
- # pre-3.2 form endpoints are not supported by this function
- raise NotImplementedError("use new_endpoint_ref_with_region")
- else:
- ref['region_id'] = region_id
- ref.update(kwargs)
- return ref
-
-
-def new_endpoint_ref_with_region(service_id, region, interface='public',
- **kwargs):
- """Define an endpoint_ref having a pre-3.2 form.
-
- Contains the deprecated 'region' instead of 'region_id'.
- """
- ref = new_endpoint_ref(service_id, interface, region=region,
- region_id='invalid', **kwargs)
- del ref['region_id']
- return ref
-
-
-def new_domain_ref(**kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True
- }
- ref.update(kwargs)
- return ref
-
-
-def new_project_ref(domain_id=None, is_domain=False, **kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True,
- 'domain_id': domain_id,
- 'is_domain': is_domain,
- }
- # NOTE(henry-nash): We don't include parent_id in the initial list above
- # since specifying it is optional depending on where the project sits in
- # the hierarchy (and a parent_id of None has meaning - i.e. it's a top
- # level project).
- ref.update(kwargs)
- return ref
-
-
-def new_user_ref(domain_id, project_id=None, **kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'enabled': True,
- 'domain_id': domain_id,
- 'email': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex,
- }
- if project_id:
- ref['default_project_id'] = project_id
- ref.update(kwargs)
- return ref
-
-
-def new_federated_user_ref(idp_id=None, protocol_id=None, **kwargs):
- ref = {
- 'idp_id': idp_id or 'ORG_IDP',
- 'protocol_id': protocol_id or 'saml2',
- 'unique_id': uuid.uuid4().hex,
- 'display_name': uuid.uuid4().hex,
- }
- ref.update(kwargs)
- return ref
-
-
-def new_group_ref(domain_id, **kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'domain_id': domain_id
- }
- ref.update(kwargs)
- return ref
-
-
-def new_credential_ref(user_id, project_id=None, type='cert', **kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'user_id': user_id,
- 'type': type,
- }
-
- if project_id:
- ref['project_id'] = project_id
- if 'blob' not in kwargs:
- ref['blob'] = uuid.uuid4().hex
-
- ref.update(kwargs)
- return ref
-
-
-def new_cert_credential(user_id, project_id=None, blob=None, **kwargs):
- if blob is None:
- blob = {'access': uuid.uuid4().hex, 'secret': uuid.uuid4().hex}
-
- credential = new_credential_ref(user_id=user_id,
- project_id=project_id,
- blob=json.dumps(blob),
- type='cert',
- **kwargs)
- return blob, credential
-
-
-def new_ec2_credential(user_id, project_id=None, blob=None, **kwargs):
- if blob is None:
- blob = {
- 'access': uuid.uuid4().hex,
- 'secret': uuid.uuid4().hex,
- 'trust_id': None
- }
-
- if 'id' not in kwargs:
- access = blob['access'].encode('utf-8')
- kwargs['id'] = hashlib.sha256(access).hexdigest()
-
- credential = new_credential_ref(user_id=user_id,
- project_id=project_id,
- blob=json.dumps(blob),
- type='ec2',
- **kwargs)
- return blob, credential
-
-
-def new_totp_credential(user_id, project_id=None, blob=None):
- if not blob:
- blob = base64.b32encode(uuid.uuid4().hex).rstrip('=')
- credential = new_credential_ref(user_id=user_id,
- project_id=project_id,
- blob=blob,
- type='totp')
- return credential
-
-
-def new_role_ref(**kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': None
- }
- ref.update(kwargs)
- return ref
-
-
-def new_policy_ref(**kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True,
- # Store serialized JSON data as the blob to mimic real world usage.
- 'blob': json.dumps({'data': uuid.uuid4().hex, }),
- 'type': uuid.uuid4().hex,
- }
-
- ref.update(kwargs)
- return ref
-
-
-def new_trust_ref(trustor_user_id, trustee_user_id, project_id=None,
- impersonation=None, expires=None, role_ids=None,
- role_names=None, remaining_uses=None,
- allow_redelegation=False, redelegation_count=None, **kwargs):
- ref = {
- 'id': uuid.uuid4().hex,
- 'trustor_user_id': trustor_user_id,
- 'trustee_user_id': trustee_user_id,
- 'impersonation': impersonation or False,
- 'project_id': project_id,
- 'remaining_uses': remaining_uses,
- 'allow_redelegation': allow_redelegation,
- }
-
- if isinstance(redelegation_count, int):
- ref.update(redelegation_count=redelegation_count)
-
- if isinstance(expires, six.string_types):
- ref['expires_at'] = expires
- elif isinstance(expires, dict):
- ref['expires_at'] = (
- timeutils.utcnow() + datetime.timedelta(**expires)
- ).strftime(TIME_FORMAT)
- elif expires is None:
- pass
- else:
- raise NotImplementedError('Unexpected value for "expires"')
-
- role_ids = role_ids or []
- role_names = role_names or []
- if role_ids or role_names:
- ref['roles'] = []
- for role_id in role_ids:
- ref['roles'].append({'id': role_id})
- for role_name in role_names:
- ref['roles'].append({'name': role_name})
-
- ref.update(kwargs)
- return ref
-
-
-def create_user(api, domain_id, **kwargs):
- """Create a user via the API. Keep the created password.
-
- The password is saved and restored when api.create_user() is called.
- Only use this routine if there is a requirement for the user object to
- have a valid password after api.create_user() is called.
- """
- user = new_user_ref(domain_id=domain_id, **kwargs)
- password = user['password']
- user = api.create_user(user)
- user['password'] = password
- return user
-
-
-class BaseTestCase(testtools.TestCase):
- """Light weight base test class.
-
- This is a placeholder that will eventually go away once the
- setup/teardown in TestCase is properly trimmed down to the bare
- essentials. This is really just a play to speed up the tests by
- eliminating unnecessary work.
- """
-
- def setUp(self):
- super(BaseTestCase, self).setUp()
-
- self.useFixture(fixtures.NestedTempfile())
- self.useFixture(fixtures.TempHomeDir())
-
- self.useFixture(mockpatch.PatchObject(sys, 'exit',
- side_effect=UnexpectedExit))
- self.useFixture(log_fixture.get_logging_handle_error_fixture())
-
- warnings.filterwarnings('error', category=DeprecationWarning,
- module='^keystone\\.')
- warnings.simplefilter('error', exc.SAWarning)
- self.addCleanup(warnings.resetwarnings)
- # Ensure we have an empty threadlocal context at the start of each
- # test.
- self.assertIsNone(oslo_context.get_current())
- self.useFixture(oslo_ctx_fixture.ClearRequestContext())
-
- def cleanup_instance(self, *names):
- """Create a function suitable for use with self.addCleanup.
-
- :returns: a callable that uses a closure to delete instance attributes
-
- """
- def cleanup():
- for name in names:
- # TODO(dstanek): remove this 'if' statement once
- # load_backend in test_backend_ldap is only called once
- # per test
- if hasattr(self, name):
- delattr(self, name)
- return cleanup
-
-
-class TestCase(BaseTestCase):
-
- def config_files(self):
- return []
-
- def _policy_fixture(self):
- return ksfixtures.Policy(dirs.etc('policy.json'), self.config_fixture)
-
- def config_overrides(self):
- # NOTE(morganfainberg): enforce config_overrides can only ever be
- # called a single time.
- assert self.__config_overrides_called is False
- self.__config_overrides_called = True
-
- signing_certfile = 'examples/pki/certs/signing_cert.pem'
- signing_keyfile = 'examples/pki/private/signing_key.pem'
-
- self.useFixture(self._policy_fixture())
-
- self.config_fixture.config(
- # TODO(morganfainberg): Make Cache Testing a separate test case
- # in tempest, and move it out of the base unit tests.
- group='cache',
- backend='dogpile.cache.memory',
- enabled=True,
- proxies=['oslo_cache.testing.CacheIsolatingProxy'])
- self.config_fixture.config(
- group='catalog',
- driver='sql',
- template_file=dirs.tests('default_catalog.templates'))
- self.config_fixture.config(
- group='kvs',
- backends=[
- ('keystone.tests.unit.test_kvs.'
- 'KVSBackendForcedKeyMangleFixture'),
- 'keystone.tests.unit.test_kvs.KVSBackendFixture'])
- self.config_fixture.config(
- group='signing', certfile=signing_certfile,
- keyfile=signing_keyfile,
- ca_certs='examples/pki/certs/cacert.pem')
- self.config_fixture.config(group='token', driver='kvs')
- self.config_fixture.config(
- group='saml', certfile=signing_certfile, keyfile=signing_keyfile)
- self.config_fixture.config(
- default_log_levels=[
- 'amqp=WARN',
- 'amqplib=WARN',
- 'boto=WARN',
- 'qpid=WARN',
- 'sqlalchemy=WARN',
- 'suds=INFO',
- 'oslo.messaging=INFO',
- 'iso8601=WARN',
- 'requests.packages.urllib3.connectionpool=WARN',
- 'routes.middleware=INFO',
- 'stevedore.extension=INFO',
- 'keystone.notifications=INFO',
- 'keystone.common.ldap=INFO',
- ])
- self.auth_plugin_config_override()
-
- def auth_plugin_config_override(self, methods=None, **method_classes):
- self.useFixture(
- ksfixtures.ConfigAuthPlugins(self.config_fixture,
- methods,
- **method_classes))
-
- def _assert_config_overrides_called(self):
- assert self.__config_overrides_called is True
-
- def setUp(self):
- super(TestCase, self).setUp()
- self.__config_overrides_called = False
- self.__load_backends_called = False
- self.addCleanup(CONF.reset)
- self.config_fixture = self.useFixture(config_fixture.Config(CONF))
- self.addCleanup(delattr, self, 'config_fixture')
- self.config(self.config_files())
-
- # NOTE(morganfainberg): mock the auth plugin setup to use the config
- # fixture which automatically unregisters options when performing
- # cleanup.
- def mocked_register_auth_plugin_opt(conf, opt):
- self.config_fixture.register_opt(opt, group='auth')
- self.useFixture(mockpatch.PatchObject(
- config, '_register_auth_plugin_opt',
- new=mocked_register_auth_plugin_opt))
-
- self.sql_driver_version_overrides = {}
- self.config_overrides()
- # NOTE(morganfainberg): ensure config_overrides has been called.
- self.addCleanup(self._assert_config_overrides_called)
-
- self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
-
- # NOTE(morganfainberg): This code is a copy from the oslo-incubator
- # log module. This is not in a function or otherwise available to use
- # without having a CONF object to setup logging. This should help to
- # reduce the log size by limiting what we log (similar to how Keystone
- # would run under mod_wsgi or eventlet).
- for pair in CONF.default_log_levels:
- mod, _sep, level_name = pair.partition('=')
- logger = logging.getLogger(mod)
- logger.setLevel(level_name)
-
- self.useFixture(ksfixtures.Cache())
-
- # Clear the registry of providers so that providers from previous
- # tests aren't used.
- self.addCleanup(dependency.reset)
-
- # Ensure Notification subscriptions and resource types are empty
- self.addCleanup(notifications.clear_subscribers)
- self.addCleanup(notifications.reset_notifier)
-
- # Reset the auth-plugin registry
- self.addCleanup(self.clear_auth_plugin_registry)
-
- self.addCleanup(setattr, controllers, '_VERSIONS', [])
-
- def config(self, config_files):
- sql.initialize()
- CONF(args=[], project='keystone', default_config_files=config_files)
-
- def load_backends(self):
- """Initializes each manager and assigns them to an attribute."""
- # TODO(blk-u): Shouldn't need to clear the registry here, but some
- # tests call load_backends multiple times. These should be fixed to
- # only call load_backends once.
- dependency.reset()
-
- # TODO(morganfainberg): Shouldn't need to clear the registry here, but
- # some tests call load_backends multiple times. Since it is not
- # possible to re-configure a backend, we need to clear the list. This
- # should eventually be removed once testing has been cleaned up.
- kvs_core.KEY_VALUE_STORE_REGISTRY.clear()
-
- self.clear_auth_plugin_registry()
- drivers, _unused = common.setup_backends(
- load_extra_backends_fn=self.load_extra_backends)
-
- for manager_name, manager in drivers.items():
- setattr(self, manager_name, manager)
- self.addCleanup(self.cleanup_instance(*list(drivers.keys())))
-
- def load_extra_backends(self):
- """Override to load managers that aren't loaded by default.
-
- This is useful to load managers initialized by extensions. No extra
- backends are loaded by default.
-
- :returns: dict of name -> manager
- """
- return {}
-
- def load_fixtures(self, fixtures):
- """Hacky basic and naive fixture loading based on a python module.
-
- Expects that the various APIs into the various services are already
- defined on `self`.
-
- """
- # NOTE(dstanek): create a list of attribute names to be removed
- # from this instance during cleanup
- fixtures_to_cleanup = []
-
- # TODO(termie): doing something from json, probably based on Django's
- # loaddata will be much preferred.
- if (hasattr(self, 'identity_api') and
- hasattr(self, 'assignment_api') and
- hasattr(self, 'resource_api')):
- for domain in fixtures.DOMAINS:
- try:
- rv = self.resource_api.create_domain(domain['id'], domain)
- except exception.Conflict:
- rv = self.resource_api.get_domain(domain['id'])
- except exception.NotImplemented:
- rv = domain
- attrname = 'domain_%s' % domain['id']
- setattr(self, attrname, rv)
- fixtures_to_cleanup.append(attrname)
-
- for tenant in fixtures.TENANTS:
- tenant_attr_name = 'tenant_%s' % tenant['name'].lower()
- if hasattr(self, tenant_attr_name):
- try:
- # This will clear out any roles on the project as well
- self.resource_api.delete_project(tenant['id'])
- except exception.ProjectNotFound:
- pass
- rv = self.resource_api.create_project(
- tenant['id'], tenant)
-
- setattr(self, tenant_attr_name, rv)
- fixtures_to_cleanup.append(tenant_attr_name)
-
- for role in fixtures.ROLES:
- try:
- rv = self.role_api.create_role(role['id'], role)
- except exception.Conflict:
- rv = self.role_api.get_role(role['id'])
- attrname = 'role_%s' % role['id']
- setattr(self, attrname, rv)
- fixtures_to_cleanup.append(attrname)
-
- for user in fixtures.USERS:
- user_copy = user.copy()
- tenants = user_copy.pop('tenants')
- try:
- existing_user = getattr(self, 'user_%s' % user['id'], None)
- if existing_user is not None:
- self.identity_api.delete_user(existing_user['id'])
- except exception.UserNotFound:
- pass
-
- # For users, the manager layer will generate the ID
- user_copy = self.identity_api.create_user(user_copy)
- # Our tests expect that the password is still in the user
- # record so that they can reference it, so put it back into
- # the dict returned.
- user_copy['password'] = user['password']
-
- for tenant_id in tenants:
- try:
- self.assignment_api.add_user_to_project(
- tenant_id, user_copy['id'])
- except exception.Conflict:
- pass
- # Use the ID from the fixture as the attribute name, so
- # that our tests can easily reference each user dict, while
- # the ID in the dict will be the real public ID.
- attrname = 'user_%s' % user['id']
- setattr(self, attrname, user_copy)
- fixtures_to_cleanup.append(attrname)
-
- for role_assignment in fixtures.ROLE_ASSIGNMENTS:
- role_id = role_assignment['role_id']
- user = role_assignment['user']
- tenant_id = role_assignment['tenant_id']
- user_id = getattr(self, 'user_%s' % user)['id']
- try:
- self.assignment_api.add_role_to_user_and_project(
- user_id, tenant_id, role_id)
- except exception.Conflict:
- pass
-
- self.addCleanup(self.cleanup_instance(*fixtures_to_cleanup))
-
- def _paste_config(self, config):
- if not config.startswith('config:'):
- test_path = os.path.join(TESTSDIR, config)
- etc_path = os.path.join(ROOTDIR, 'etc', config)
- for path in [test_path, etc_path]:
- if os.path.exists('%s-paste.ini' % path):
- return 'config:%s-paste.ini' % path
- return config
-
- def loadapp(self, config, name='main'):
- return service.loadapp(self._paste_config(config), name=name)
-
- def clear_auth_plugin_registry(self):
- auth.controllers.AUTH_METHODS.clear()
- auth.controllers.AUTH_PLUGINS_LOADED = False
-
- def assertCloseEnoughForGovernmentWork(self, a, b, delta=3):
- """Asserts that two datetimes are nearly equal within a small delta.
-
- :param delta: Maximum allowable time delta, defined in seconds.
- """
- if a == b:
- # Short-circuit if the values are the same.
- return
-
- msg = '%s != %s within %s delta' % (a, b, delta)
-
- self.assertTrue(abs(a - b).seconds <= delta, msg)
-
- def assertNotEmpty(self, l):
- self.assertTrue(len(l))
-
- def assertRaisesRegexp(self, expected_exception, expected_regexp,
- callable_obj, *args, **kwargs):
- """Asserts that the message in a raised exception matches a regexp."""
- try:
- callable_obj(*args, **kwargs)
- except expected_exception as exc_value:
- if isinstance(expected_regexp, six.string_types):
- expected_regexp = re.compile(expected_regexp)
-
- if isinstance(exc_value.args[0], six.text_type):
- if not expected_regexp.search(six.text_type(exc_value)):
- raise self.failureException(
- '"%s" does not match "%s"' %
- (expected_regexp.pattern, six.text_type(exc_value)))
- else:
- if not expected_regexp.search(str(exc_value)):
- raise self.failureException(
- '"%s" does not match "%s"' %
- (expected_regexp.pattern, str(exc_value)))
- else:
- if hasattr(expected_exception, '__name__'):
- excName = expected_exception.__name__
- else:
- excName = str(expected_exception)
- raise self.failureException("%s not raised" % excName)
-
- @property
- def ipv6_enabled(self):
- if socket.has_ipv6:
- sock = None
- try:
- sock = socket.socket(socket.AF_INET6)
- # NOTE(Mouad): Try to bind to IPv6 loopback ip address.
- sock.bind(("::1", 0))
- return True
- except socket.error:
- pass
- finally:
- if sock:
- sock.close()
- return False
-
- def skip_if_no_ipv6(self):
- if not self.ipv6_enabled:
- raise self.skipTest("IPv6 is not enabled in the system")
-
- def skip_if_env_not_set(self, env_var):
- if not os.environ.get(env_var):
- self.skipTest('Env variable %s is not set.' % env_var)
-
-
-class SQLDriverOverrides(object):
- """A mixin for consolidating sql-specific test overrides."""
-
- def config_overrides(self):
- super(SQLDriverOverrides, self).config_overrides()
- # SQL specific driver overrides
- self.config_fixture.config(group='catalog', driver='sql')
- self.config_fixture.config(group='identity', driver='sql')
- self.config_fixture.config(group='policy', driver='sql')
- self.config_fixture.config(group='token', driver='sql')
- self.config_fixture.config(group='trust', driver='sql')
-
- def use_specific_sql_driver_version(self, driver_path,
- versionless_backend, version_suffix):
- """Add this versioned driver to the list that will be loaded.
-
- :param driver_path: The path to the drivers, e.g. 'keystone.assignment'
- :param versionless_backend: The name of the versionless drivers, e.g.
- 'backends'
- :param version_suffix: The suffix for the version , e.g. ``V8_``
-
- This method assumes that versioned drivers are named:
- <version_suffix><name of versionless driver>, e.g. 'V8_backends'.
-
- """
- self.sql_driver_version_overrides[driver_path] = {
- 'versionless_backend': versionless_backend,
- 'versioned_backend': version_suffix + versionless_backend}
diff --git a/keystone-moon/keystone/tests/unit/default_catalog.templates b/keystone-moon/keystone/tests/unit/default_catalog.templates
deleted file mode 100644
index faf87eb5..00000000
--- a/keystone-moon/keystone/tests/unit/default_catalog.templates
+++ /dev/null
@@ -1,14 +0,0 @@
-# config for templated.Catalog, using camelCase because I don't want to do
-# translations for keystone compat
-catalog.RegionOne.identity.publicURL = http://localhost:$(public_port)s/v2.0
-catalog.RegionOne.identity.adminURL = http://localhost:$(admin_port)s/v2.0
-catalog.RegionOne.identity.internalURL = http://localhost:$(admin_port)s/v2.0
-catalog.RegionOne.identity.name = 'Identity Service'
-catalog.RegionOne.identity.id = 1
-
-# fake compute service for now to help novaclient tests work
-catalog.RegionOne.compute.publicURL = http://localhost:8774/v1.1/$(tenant_id)s
-catalog.RegionOne.compute.adminURL = http://localhost:8774/v1.1/$(tenant_id)s
-catalog.RegionOne.compute.internalURL = http://localhost:8774/v1.1/$(tenant_id)s
-catalog.RegionOne.compute.name = 'Compute Service'
-catalog.RegionOne.compute.id = 2
diff --git a/keystone-moon/keystone/tests/unit/default_fixtures.py b/keystone-moon/keystone/tests/unit/default_fixtures.py
deleted file mode 100644
index 7f661986..00000000
--- a/keystone-moon/keystone/tests/unit/default_fixtures.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# NOTE(dolph): please try to avoid additional fixtures if possible; test suite
-# performance may be negatively affected.
-import uuid
-
-BAR_TENANT_ID = uuid.uuid4().hex
-BAZ_TENANT_ID = uuid.uuid4().hex
-MTU_TENANT_ID = uuid.uuid4().hex
-SERVICE_TENANT_ID = uuid.uuid4().hex
-DEFAULT_DOMAIN_ID = 'default'
-
-TENANTS = [
- {
- 'id': BAR_TENANT_ID,
- 'name': 'BAR',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'description': 'description',
- 'enabled': True,
- 'parent_id': DEFAULT_DOMAIN_ID,
- 'is_domain': False,
- }, {
- 'id': BAZ_TENANT_ID,
- 'name': 'BAZ',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'description': 'description',
- 'enabled': True,
- 'parent_id': DEFAULT_DOMAIN_ID,
- 'is_domain': False,
- }, {
- 'id': MTU_TENANT_ID,
- 'name': 'MTU',
- 'description': 'description',
- 'enabled': True,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'parent_id': DEFAULT_DOMAIN_ID,
- 'is_domain': False,
- }, {
- 'id': SERVICE_TENANT_ID,
- 'name': 'service',
- 'description': 'description',
- 'enabled': True,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'parent_id': DEFAULT_DOMAIN_ID,
- 'is_domain': False,
- }
-]
-
-# NOTE(ja): a role of keystone_admin is done in setUp
-USERS = [
- # NOTE(morganfainberg): Admin user for replacing admin_token_auth
- {
- 'id': 'reqadmin',
- 'name': 'REQ_ADMIN',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'password',
- 'tenants': [],
- 'enabled': True
- },
- {
- 'id': 'foo',
- 'name': 'FOO',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'foo2',
- 'tenants': [BAR_TENANT_ID],
- 'enabled': True,
- 'email': 'foo@bar.com',
- }, {
- 'id': 'two',
- 'name': 'TWO',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'two2',
- 'enabled': True,
- 'default_project_id': BAZ_TENANT_ID,
- 'tenants': [BAZ_TENANT_ID],
- 'email': 'two@three.com',
- }, {
- 'id': 'badguy',
- 'name': 'BadGuy',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'bad',
- 'enabled': False,
- 'default_project_id': BAZ_TENANT_ID,
- 'tenants': [BAZ_TENANT_ID],
- 'email': 'bad@guy.com',
- }, {
- 'id': 'sna',
- 'name': 'SNA',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'snafu',
- 'enabled': True,
- 'tenants': [BAR_TENANT_ID],
- 'email': 'sna@snl.coom',
- }
-]
-
-ROLES = [
- {
- 'id': 'admin',
- 'name': 'admin',
- 'domain_id': None,
- }, {
- 'id': 'member',
- 'name': 'Member',
- 'domain_id': None,
- }, {
- 'id': '9fe2ff9ee4384b1894a90878d3e92bab',
- 'name': '_member_',
- 'domain_id': None,
- }, {
- 'id': 'other',
- 'name': 'Other',
- 'domain_id': None,
- }, {
- 'id': 'browser',
- 'name': 'Browser',
- 'domain_id': None,
- }, {
- 'id': 'writer',
- 'name': 'Writer',
- 'domain_id': None,
- }, {
- 'id': 'service',
- 'name': 'Service',
- 'domain_id': None,
- }
-]
-
-# NOTE(morganfainberg): Admin assignment for replacing admin_token_auth
-ROLE_ASSIGNMENTS = [
- {
- 'user': 'reqadmin',
- 'tenant_id': SERVICE_TENANT_ID,
- 'role_id': 'admin'
- },
-]
-
-DOMAINS = [{'description':
- (u'The default domain'),
- 'enabled': True,
- 'id': DEFAULT_DOMAIN_ID,
- 'name': u'Default'}]
diff --git a/keystone-moon/keystone/tests/unit/external/README.rst b/keystone-moon/keystone/tests/unit/external/README.rst
deleted file mode 100644
index e8f9fa65..00000000
--- a/keystone-moon/keystone/tests/unit/external/README.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-This directory contains interface tests for external libraries. The goal
-is not to test every possible path through a library's code and get 100%
-coverage. It's to give us a level of confidence that their general interface
-remains the same through version upgrades.
-
-This gives us a place to put these tests without having to litter our
-own tests with assertions that are not directly related to the code
-under test. The expectations for the external library are all in one
-place so it makes it easier for us to find out what they are.
diff --git a/keystone-moon/keystone/tests/unit/external/__init__.py b/keystone-moon/keystone/tests/unit/external/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/external/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/external/test_timeutils.py b/keystone-moon/keystone/tests/unit/external/test_timeutils.py
deleted file mode 100644
index 7fc72d58..00000000
--- a/keystone-moon/keystone/tests/unit/external/test_timeutils.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import datetime
-
-from oslo_utils import timeutils
-
-import keystone.tests.unit as tests
-
-
-class TestTimeUtils(tests.BaseTestCase):
-
- def test_parsing_date_strings_returns_a_datetime(self):
- example_date_str = '2015-09-23T04:45:37.196621Z'
- dt = timeutils.parse_strtime(example_date_str, fmt=tests.TIME_FORMAT)
- self.assertIsInstance(dt, datetime.datetime)
-
- def test_parsing_invalid_date_strings_raises_a_ValueError(self):
- example_date_str = ''
- simple_format = '%Y'
- self.assertRaises(ValueError,
- timeutils.parse_strtime,
- example_date_str,
- fmt=simple_format)
diff --git a/keystone-moon/keystone/tests/unit/fakeldap.py b/keystone-moon/keystone/tests/unit/fakeldap.py
deleted file mode 100644
index 9ad1f218..00000000
--- a/keystone-moon/keystone/tests/unit/fakeldap.py
+++ /dev/null
@@ -1,664 +0,0 @@
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Fake LDAP server for test harness.
-
-This class does very little error checking, and knows nothing about ldap
-class definitions. It implements the minimum emulation of the python ldap
-library to work with keystone.
-
-"""
-
-import random
-import re
-import shelve
-
-import ldap
-from oslo_config import cfg
-from oslo_log import log
-import six
-from six import moves
-
-from keystone.common.ldap import core
-from keystone import exception
-
-
-SCOPE_NAMES = {
- ldap.SCOPE_BASE: 'SCOPE_BASE',
- ldap.SCOPE_ONELEVEL: 'SCOPE_ONELEVEL',
- ldap.SCOPE_SUBTREE: 'SCOPE_SUBTREE',
-}
-
-# http://msdn.microsoft.com/en-us/library/windows/desktop/aa366991(v=vs.85).aspx # noqa
-CONTROL_TREEDELETE = '1.2.840.113556.1.4.805'
-
-LOG = log.getLogger(__name__)
-CONF = cfg.CONF
-
-
-def _internal_attr(attr_name, value_or_values):
- def normalize_value(value):
- return core.utf8_decode(value)
-
- def normalize_dn(dn):
- # Capitalize the attribute names as an LDAP server might.
-
- # NOTE(blk-u): Special case for this tested value, used with
- # test_user_id_comma. The call to str2dn here isn't always correct
- # here, because `dn` is escaped for an LDAP filter. str2dn() normally
- # works only because there's no special characters in `dn`.
- if dn == 'cn=Doe\\5c, John,ou=Users,cn=example,cn=com':
- return 'CN=Doe\\, John,OU=Users,CN=example,CN=com'
-
- # NOTE(blk-u): Another special case for this tested value. When a
- # roleOccupant has an escaped comma, it gets converted to \2C.
- if dn == 'cn=Doe\\, John,ou=Users,cn=example,cn=com':
- return 'CN=Doe\\2C John,OU=Users,CN=example,CN=com'
-
- try:
- dn = ldap.dn.str2dn(core.utf8_encode(dn))
- except ldap.DECODING_ERROR:
- # NOTE(amakarov): In case of IDs instead of DNs in group members
- # they must be handled as regular values.
- return normalize_value(dn)
-
- norm = []
- for part in dn:
- name, val, i = part[0]
- name = core.utf8_decode(name)
- name = name.upper()
- name = core.utf8_encode(name)
- norm.append([(name, val, i)])
- return core.utf8_decode(ldap.dn.dn2str(norm))
-
- if attr_name in ('member', 'roleOccupant'):
- attr_fn = normalize_dn
- else:
- attr_fn = normalize_value
-
- if isinstance(value_or_values, list):
- return [attr_fn(x) for x in value_or_values]
- return [attr_fn(value_or_values)]
-
-
-def _match_query(query, attrs, attrs_checked):
- """Match an ldap query to an attribute dictionary.
-
- The characters &, |, and ! are supported in the query. No syntax checking
- is performed, so malformed queries will not work correctly.
- """
- # cut off the parentheses
- inner = query[1:-1]
- if inner.startswith(('&', '|')):
- if inner[0] == '&':
- matchfn = all
- else:
- matchfn = any
- # cut off the & or |
- groups = _paren_groups(inner[1:])
- return matchfn(_match_query(group, attrs, attrs_checked)
- for group in groups)
- if inner.startswith('!'):
- # cut off the ! and the nested parentheses
- return not _match_query(query[2:-1], attrs, attrs_checked)
-
- (k, _sep, v) = inner.partition('=')
- attrs_checked.add(k.lower())
- return _match(k, v, attrs)
-
-
-def _paren_groups(source):
- """Split a string into parenthesized groups."""
- count = 0
- start = 0
- result = []
- for pos in moves.range(len(source)):
- if source[pos] == '(':
- if count == 0:
- start = pos
- count += 1
- if source[pos] == ')':
- count -= 1
- if count == 0:
- result.append(source[start:pos + 1])
- return result
-
-
-def _match(key, value, attrs):
- """Match a given key and value against an attribute list."""
- def match_with_wildcards(norm_val, val_list):
- # Case insensitive checking with wildcards
- if norm_val.startswith('*'):
- if norm_val.endswith('*'):
- # Is the string anywhere in the target?
- for x in val_list:
- if norm_val[1:-1] in x:
- return True
- else:
- # Is the string at the end of the target?
- for x in val_list:
- if (norm_val[1:] ==
- x[len(x) - len(norm_val) + 1:]):
- return True
- elif norm_val.endswith('*'):
- # Is the string at the start of the target?
- for x in val_list:
- if norm_val[:-1] == x[:len(norm_val) - 1]:
- return True
- else:
- # Is the string an exact match?
- for x in val_list:
- if check_value == x:
- return True
- return False
-
- if key not in attrs:
- return False
- # This is a pure wild card search, so the answer must be yes!
- if value == '*':
- return True
- if key == 'serviceId':
- # for serviceId, the backend is returning a list of numbers
- # make sure we convert them to strings first before comparing
- # them
- str_sids = [six.text_type(x) for x in attrs[key]]
- return six.text_type(value) in str_sids
- if key != 'objectclass':
- check_value = _internal_attr(key, value)[0].lower()
- norm_values = list(
- _internal_attr(key, x)[0].lower() for x in attrs[key])
- return match_with_wildcards(check_value, norm_values)
- # it is an objectclass check, so check subclasses
- values = _subs(value)
- for v in values:
- if v in attrs[key]:
- return True
- return False
-
-
-def _subs(value):
- """Returns a list of subclass strings.
-
- The strings represent the ldap objectclass plus any subclasses that
- inherit from it. Fakeldap doesn't know about the ldap object structure,
- so subclasses need to be defined manually in the dictionary below.
-
- """
- subs = {'groupOfNames': ['keystoneTenant',
- 'keystoneRole',
- 'keystoneTenantRole']}
- if value in subs:
- return [value] + subs[value]
- return [value]
-
-
-server_fail = False
-
-
-class FakeShelve(dict):
-
- def sync(self):
- pass
-
-
-FakeShelves = {}
-PendingRequests = {}
-
-
-class FakeLdap(core.LDAPHandler):
- """Emulate the python-ldap API.
-
- The python-ldap API requires all strings to be UTF-8 encoded. This
- is assured by the caller of this interface
- (i.e. KeystoneLDAPHandler).
-
- However, internally this emulation MUST process and store strings
- in a canonical form which permits operations on
- characters. Encoded strings do not provide the ability to operate
- on characters. Therefore this emulation accepts UTF-8 encoded
- strings, decodes them to unicode for operations internal to this
- emulation, and encodes them back to UTF-8 when returning values
- from the emulation.
-
- """
-
- __prefix = 'ldap:'
-
- def __init__(self, conn=None):
- super(FakeLdap, self).__init__(conn=conn)
- self._ldap_options = {ldap.OPT_DEREF: ldap.DEREF_NEVER}
-
- def connect(self, url, page_size=0, alias_dereferencing=None,
- use_tls=False, tls_cacertfile=None, tls_cacertdir=None,
- tls_req_cert='demand', chase_referrals=None, debug_level=None,
- use_pool=None, pool_size=None, pool_retry_max=None,
- pool_retry_delay=None, pool_conn_timeout=None,
- pool_conn_lifetime=None):
- if url.startswith('fake://memory'):
- if url not in FakeShelves:
- FakeShelves[url] = FakeShelve()
- self.db = FakeShelves[url]
- else:
- self.db = shelve.open(url[7:])
-
- using_ldaps = url.lower().startswith("ldaps")
-
- if use_tls and using_ldaps:
- raise AssertionError('Invalid TLS / LDAPS combination')
-
- if use_tls:
- if tls_cacertfile:
- ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_cacertfile)
- elif tls_cacertdir:
- ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir)
- if tls_req_cert in list(core.LDAP_TLS_CERTS.values()):
- ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert)
- else:
- raise ValueError("invalid TLS_REQUIRE_CERT tls_req_cert=%s",
- tls_req_cert)
-
- if alias_dereferencing is not None:
- self.set_option(ldap.OPT_DEREF, alias_dereferencing)
- self.page_size = page_size
-
- self.use_pool = use_pool
- self.pool_size = pool_size
- self.pool_retry_max = pool_retry_max
- self.pool_retry_delay = pool_retry_delay
- self.pool_conn_timeout = pool_conn_timeout
- self.pool_conn_lifetime = pool_conn_lifetime
-
- def dn(self, dn):
- return core.utf8_decode(dn)
-
- def _dn_to_id_attr(self, dn):
- return core.utf8_decode(ldap.dn.str2dn(core.utf8_encode(dn))[0][0][0])
-
- def _dn_to_id_value(self, dn):
- return core.utf8_decode(ldap.dn.str2dn(core.utf8_encode(dn))[0][0][1])
-
- def key(self, dn):
- return '%s%s' % (self.__prefix, self.dn(dn))
-
- def simple_bind_s(self, who='', cred='',
- serverctrls=None, clientctrls=None):
- """This method is ignored, but provided for compatibility."""
- if server_fail:
- raise ldap.SERVER_DOWN
- whos = ['cn=Admin', CONF.ldap.user]
- if who in whos and cred in ['password', CONF.ldap.password]:
- return
-
- try:
- attrs = self.db[self.key(who)]
- except KeyError:
- LOG.debug('bind fail: who=%s not found', core.utf8_decode(who))
- raise ldap.NO_SUCH_OBJECT
-
- db_password = None
- try:
- db_password = attrs['userPassword'][0]
- except (KeyError, IndexError):
- LOG.debug('bind fail: password for who=%s not found',
- core.utf8_decode(who))
- raise ldap.INAPPROPRIATE_AUTH
-
- if cred != db_password:
- LOG.debug('bind fail: password for who=%s does not match',
- core.utf8_decode(who))
- raise ldap.INVALID_CREDENTIALS
-
- def unbind_s(self):
- """This method is ignored, but provided for compatibility."""
- if server_fail:
- raise ldap.SERVER_DOWN
-
- def add_s(self, dn, modlist):
- """Add an object with the specified attributes at dn."""
- if server_fail:
- raise ldap.SERVER_DOWN
-
- id_attr_in_modlist = False
- id_attr = self._dn_to_id_attr(dn)
- id_value = self._dn_to_id_value(dn)
-
- # The LDAP API raises a TypeError if attr name is None.
- for k, dummy_v in modlist:
- if k is None:
- raise TypeError('must be string, not None. modlist=%s' %
- modlist)
-
- if k == id_attr:
- for val in dummy_v:
- if core.utf8_decode(val) == id_value:
- id_attr_in_modlist = True
-
- if not id_attr_in_modlist:
- LOG.debug('id_attribute=%(attr)s missing, attributes=%(attrs)s' %
- {'attr': id_attr, 'attrs': modlist})
- raise ldap.NAMING_VIOLATION
- key = self.key(dn)
- LOG.debug('add item: dn=%(dn)s, attrs=%(attrs)s', {
- 'dn': core.utf8_decode(dn), 'attrs': modlist})
- if key in self.db:
- LOG.debug('add item failed: dn=%s is already in store.',
- core.utf8_decode(dn))
- raise ldap.ALREADY_EXISTS(dn)
-
- self.db[key] = {k: _internal_attr(k, v) for k, v in modlist}
- self.db.sync()
-
- def delete_s(self, dn):
- """Remove the ldap object at specified dn."""
- return self.delete_ext_s(dn, serverctrls=[])
-
- def _getChildren(self, dn):
- return [k for k, v in self.db.items()
- if re.match('%s.*,%s' % (
- re.escape(self.__prefix),
- re.escape(self.dn(dn))), k)]
-
- def delete_ext_s(self, dn, serverctrls, clientctrls=None):
- """Remove the ldap object at specified dn."""
- if server_fail:
- raise ldap.SERVER_DOWN
-
- try:
- if CONTROL_TREEDELETE in [c.controlType for c in serverctrls]:
- LOG.debug('FakeLdap subtree_delete item: dn=%s',
- core.utf8_decode(dn))
- children = self._getChildren(dn)
- for c in children:
- del self.db[c]
-
- key = self.key(dn)
- LOG.debug('FakeLdap delete item: dn=%s', core.utf8_decode(dn))
- del self.db[key]
- except KeyError:
- LOG.debug('delete item failed: dn=%s not found.',
- core.utf8_decode(dn))
- raise ldap.NO_SUCH_OBJECT
- self.db.sync()
-
- def modify_s(self, dn, modlist):
- """Modify the object at dn using the attribute list.
-
- :param dn: an LDAP DN
- :param modlist: a list of tuples in the following form:
- ([MOD_ADD | MOD_DELETE | MOD_REPACE], attribute, value)
- """
- if server_fail:
- raise ldap.SERVER_DOWN
-
- key = self.key(dn)
- LOG.debug('modify item: dn=%(dn)s attrs=%(attrs)s', {
- 'dn': core.utf8_decode(dn), 'attrs': modlist})
- try:
- entry = self.db[key]
- except KeyError:
- LOG.debug('modify item failed: dn=%s not found.',
- core.utf8_decode(dn))
- raise ldap.NO_SUCH_OBJECT
-
- for cmd, k, v in modlist:
- values = entry.setdefault(k, [])
- if cmd == ldap.MOD_ADD:
- v = _internal_attr(k, v)
- for x in v:
- if x in values:
- raise ldap.TYPE_OR_VALUE_EXISTS
- values += v
- elif cmd == ldap.MOD_REPLACE:
- values[:] = _internal_attr(k, v)
- elif cmd == ldap.MOD_DELETE:
- if v is None:
- if not values:
- LOG.debug('modify item failed: '
- 'item has no attribute "%s" to delete', k)
- raise ldap.NO_SUCH_ATTRIBUTE
- values[:] = []
- else:
- for val in _internal_attr(k, v):
- try:
- values.remove(val)
- except ValueError:
- LOG.debug('modify item failed: '
- 'item has no attribute "%(k)s" with '
- 'value "%(v)s" to delete', {
- 'k': k, 'v': val})
- raise ldap.NO_SUCH_ATTRIBUTE
- else:
- LOG.debug('modify item failed: unknown command %s', cmd)
- raise NotImplementedError('modify_s action %s not'
- ' implemented' % cmd)
- self.db[key] = entry
- self.db.sync()
-
- def search_s(self, base, scope,
- filterstr='(objectClass=*)', attrlist=None, attrsonly=0):
- """Search for all matching objects under base using the query.
-
- Args:
- base -- dn to search under
- scope -- search scope (base, subtree, onelevel)
- filterstr -- filter objects by
- attrlist -- attrs to return. Returns all attrs if not specified
-
- """
- if server_fail:
- raise ldap.SERVER_DOWN
-
- if (not filterstr) and (scope != ldap.SCOPE_BASE):
- raise AssertionError('Search without filter on onelevel or '
- 'subtree scope')
-
- if scope == ldap.SCOPE_BASE:
- try:
- item_dict = self.db[self.key(base)]
- except KeyError:
- LOG.debug('search fail: dn not found for SCOPE_BASE')
- raise ldap.NO_SUCH_OBJECT
- results = [(base, item_dict)]
- elif scope == ldap.SCOPE_SUBTREE:
- # FIXME - LDAP search with SUBTREE scope must return the base
- # entry, but the code below does _not_. Unfortunately, there are
- # several tests that depend on this broken behavior, and fail
- # when the base entry is returned in the search results. The
- # fix is easy here, just initialize results as above for
- # the SCOPE_BASE case.
- # https://bugs.launchpad.net/keystone/+bug/1368772
- try:
- item_dict = self.db[self.key(base)]
- except KeyError:
- LOG.debug('search fail: dn not found for SCOPE_SUBTREE')
- raise ldap.NO_SUCH_OBJECT
- results = [(base, item_dict)]
- extraresults = [(k[len(self.__prefix):], v)
- for k, v in self.db.items()
- if re.match('%s.*,%s' %
- (re.escape(self.__prefix),
- re.escape(self.dn(base))), k)]
- results.extend(extraresults)
- elif scope == ldap.SCOPE_ONELEVEL:
-
- def get_entries():
- base_dn = ldap.dn.str2dn(core.utf8_encode(base))
- base_len = len(base_dn)
-
- for k, v in self.db.items():
- if not k.startswith(self.__prefix):
- continue
- k_dn_str = k[len(self.__prefix):]
- k_dn = ldap.dn.str2dn(core.utf8_encode(k_dn_str))
- if len(k_dn) != base_len + 1:
- continue
- if k_dn[-base_len:] != base_dn:
- continue
- yield (k_dn_str, v)
-
- results = list(get_entries())
-
- else:
- # openldap client/server raises PROTOCOL_ERROR for unexpected scope
- raise ldap.PROTOCOL_ERROR
-
- objects = []
- for dn, attrs in results:
- # filter the objects by filterstr
- id_attr, id_val, _ = ldap.dn.str2dn(core.utf8_encode(dn))[0][0]
- id_attr = core.utf8_decode(id_attr)
- id_val = core.utf8_decode(id_val)
- match_attrs = attrs.copy()
- match_attrs[id_attr] = [id_val]
- attrs_checked = set()
- if not filterstr or _match_query(filterstr, match_attrs,
- attrs_checked):
- if (filterstr and
- (scope != ldap.SCOPE_BASE) and
- ('objectclass' not in attrs_checked)):
- raise AssertionError('No objectClass in search filter')
- # filter the attributes by attrlist
- attrs = {k: v for k, v in attrs.items()
- if not attrlist or k in attrlist}
- objects.append((dn, attrs))
-
- return objects
-
- def set_option(self, option, invalue):
- self._ldap_options[option] = invalue
-
- def get_option(self, option):
- value = self._ldap_options.get(option)
- return value
-
- def search_ext(self, base, scope,
- filterstr='(objectClass=*)', attrlist=None, attrsonly=0,
- serverctrls=None, clientctrls=None,
- timeout=-1, sizelimit=0):
- if clientctrls is not None or timeout != -1 or sizelimit != 0:
- raise exception.NotImplemented()
-
- # only passing a single server control is supported by this fake ldap
- if len(serverctrls) > 1:
- raise exception.NotImplemented()
-
- # search_ext is async and returns an identifier used for
- # retrieving the results via result3(). This will be emulated by
- # storing the request in a variable with random integer key and
- # performing the real lookup in result3()
- msgid = random.randint(0, 1000)
- PendingRequests[msgid] = (base, scope, filterstr, attrlist, attrsonly,
- serverctrls)
- return msgid
-
- def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None,
- resp_ctrl_classes=None):
- """Execute async request
-
- Only msgid param is supported. Request info is fetched from global
- variable `PendingRequests` by msgid, executed using search_s and
- limited if requested.
- """
- if all != 1 or timeout is not None or resp_ctrl_classes is not None:
- raise exception.NotImplemented()
-
- params = PendingRequests[msgid]
- # search_s accepts a subset of parameters of search_ext,
- # that's why we use only the first 5.
- results = self.search_s(*params[:5])
-
- # extract limit from serverctrl
- serverctrls = params[5]
- ctrl = serverctrls[0]
-
- if ctrl.size:
- rdata = results[:ctrl.size]
- else:
- rdata = results
-
- # real result3 returns various service info -- rtype, rmsgid,
- # serverctrls. Now this info is not used, so all this info is None
- rtype = None
- rmsgid = None
- serverctrls = None
- return (rtype, rdata, rmsgid, serverctrls)
-
-
-class FakeLdapPool(FakeLdap):
- """Emulate the python-ldap API with pooled connections.
-
- This class is used as connector class in PooledLDAPHandler.
-
- """
-
- def __init__(self, uri, retry_max=None, retry_delay=None, conn=None):
- super(FakeLdapPool, self).__init__(conn=conn)
- self.url = uri
- self.connected = None
- self.conn = self
- self._connection_time = 5 # any number greater than 0
-
- def get_lifetime(self):
- return self._connection_time
-
- def simple_bind_s(self, who=None, cred=None,
- serverctrls=None, clientctrls=None):
- if self.url.startswith('fakepool://memory'):
- if self.url not in FakeShelves:
- FakeShelves[self.url] = FakeShelve()
- self.db = FakeShelves[self.url]
- else:
- self.db = shelve.open(self.url[11:])
-
- if not who:
- who = 'cn=Admin'
- if not cred:
- cred = 'password'
-
- super(FakeLdapPool, self).simple_bind_s(who=who, cred=cred,
- serverctrls=serverctrls,
- clientctrls=clientctrls)
-
- def unbind_ext_s(self):
- """Added to extend FakeLdap as connector class."""
- pass
-
-
-class FakeLdapNoSubtreeDelete(FakeLdap):
- """FakeLdap subclass that does not support subtree delete
-
- Same as FakeLdap except delete will throw the LDAP error
- ldap.NOT_ALLOWED_ON_NONLEAF if there is an attempt to delete
- an entry that has children.
- """
-
- def delete_ext_s(self, dn, serverctrls, clientctrls=None):
- """Remove the ldap object at specified dn."""
- if server_fail:
- raise ldap.SERVER_DOWN
-
- try:
- children = self._getChildren(dn)
- if children:
- raise ldap.NOT_ALLOWED_ON_NONLEAF
-
- except KeyError:
- LOG.debug('delete item failed: dn=%s not found.',
- core.utf8_decode(dn))
- raise ldap.NO_SUCH_OBJECT
- super(FakeLdapNoSubtreeDelete, self).delete_ext_s(dn,
- serverctrls,
- clientctrls)
diff --git a/keystone-moon/keystone/tests/unit/federation_fixtures.py b/keystone-moon/keystone/tests/unit/federation_fixtures.py
deleted file mode 100644
index d4527d9c..00000000
--- a/keystone-moon/keystone/tests/unit/federation_fixtures.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-IDP_ENTITY_ID = 'https://localhost/v3/OS-FEDERATION/saml2/idp'
-IDP_SSO_ENDPOINT = 'https://localhost/v3/OS-FEDERATION/saml2/SSO'
-
-# Organization info
-IDP_ORGANIZATION_NAME = 'ACME INC'
-IDP_ORGANIZATION_DISPLAY_NAME = 'ACME'
-IDP_ORGANIZATION_URL = 'https://acme.example.com'
-
-# Contact info
-IDP_CONTACT_COMPANY = 'ACME Sub'
-IDP_CONTACT_GIVEN_NAME = 'Joe'
-IDP_CONTACT_SURNAME = 'Hacker'
-IDP_CONTACT_EMAIL = 'joe@acme.example.com'
-IDP_CONTACT_TELEPHONE_NUMBER = '1234567890'
-IDP_CONTACT_TYPE = 'technical'
diff --git a/keystone-moon/keystone/tests/unit/filtering.py b/keystone-moon/keystone/tests/unit/filtering.py
deleted file mode 100644
index 59301299..00000000
--- a/keystone-moon/keystone/tests/unit/filtering.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from oslo_config import cfg
-from six.moves import range
-
-
-CONF = cfg.CONF
-
-
-class FilterTests(object):
-
- # Provide support for checking if a batch of list items all
- # exist within a contiguous range in a total list
- def _match_with_list(self, this_batch, total_list,
- batch_size=None,
- list_start=None, list_end=None):
- if batch_size is None:
- batch_size = len(this_batch)
- if list_start is None:
- list_start = 0
- if list_end is None:
- list_end = len(total_list)
- for batch_item in range(0, batch_size):
- found = False
- for list_item in range(list_start, list_end):
- if this_batch[batch_item]['id'] == total_list[list_item]['id']:
- found = True
- self.assertTrue(found)
-
- def _create_entity(self, entity_type):
- """Find the create_<entity_type> method.
-
- Searches through the [identity_api, resource_api, assignment_api]
- managers for a method called create_<entity_type> and returns the first
- one.
-
- """
- f = getattr(self.identity_api, 'create_%s' % entity_type, None)
- if f is None:
- f = getattr(self.resource_api, 'create_%s' % entity_type, None)
- if f is None:
- f = getattr(self.assignment_api, 'create_%s' % entity_type)
- return f
-
- def _delete_entity(self, entity_type):
- """Find the delete_<entity_type> method.
-
- Searches through the [identity_api, resource_api, assignment_api]
- managers for a method called delete_<entity_type> and returns the first
- one.
-
- """
- f = getattr(self.identity_api, 'delete_%s' % entity_type, None)
- if f is None:
- f = getattr(self.resource_api, 'delete_%s' % entity_type, None)
- if f is None:
- f = getattr(self.assignment_api, 'delete_%s' % entity_type)
- return f
-
- def _list_entities(self, entity_type):
- """Find the list_<entity_type> method.
-
- Searches through the [identity_api, resource_api, assignment_api]
- managers for a method called list_<entity_type> and returns the first
- one.
-
- """
- f = getattr(self.identity_api, 'list_%ss' % entity_type, None)
- if f is None:
- f = getattr(self.resource_api, 'list_%ss' % entity_type, None)
- if f is None:
- f = getattr(self.assignment_api, 'list_%ss' % entity_type)
- return f
-
- def _create_one_entity(self, entity_type, domain_id, name):
- new_entity = {'name': name,
- 'domain_id': domain_id}
- if entity_type in ['user', 'group']:
- # The manager layer creates the ID for users and groups
- new_entity = self._create_entity(entity_type)(new_entity)
- else:
- new_entity['id'] = '0000' + uuid.uuid4().hex
- self._create_entity(entity_type)(new_entity['id'], new_entity)
- return new_entity
-
- def _create_test_data(self, entity_type, number, domain_id=None,
- name_dict=None):
- """Create entity test data
-
- :param entity_type: type of entity to create, e.g. 'user', group' etc.
- :param number: number of entities to create,
- :param domain_id: if not defined, all users will be created in the
- default domain.
- :param name_dict: optional dict containing entity number and name pairs
-
- """
- entity_list = []
- if domain_id is None:
- domain_id = CONF.identity.default_domain_id
- name_dict = name_dict or {}
- for x in range(number):
- # If this index has a name defined in the name_dict, then use it
- name = name_dict.get(x, uuid.uuid4().hex)
- new_entity = self._create_one_entity(entity_type, domain_id, name)
- entity_list.append(new_entity)
- return entity_list
-
- def _delete_test_data(self, entity_type, entity_list):
- for entity in entity_list:
- self._delete_entity(entity_type)(entity['id'])
diff --git a/keystone-moon/keystone/tests/unit/identity/__init__.py b/keystone-moon/keystone/tests/unit/identity/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/identity/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/identity/test_backends.py b/keystone-moon/keystone/tests/unit/identity/test_backends.py
deleted file mode 100644
index 8b5c0def..00000000
--- a/keystone-moon/keystone/tests/unit/identity/test_backends.py
+++ /dev/null
@@ -1,1297 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-import mock
-from oslo_config import cfg
-from six.moves import range
-from testtools import matchers
-
-from keystone.common import driver_hints
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit import filtering
-
-
-CONF = cfg.CONF
-
-
-class IdentityTests(object):
-
- def _get_domain_fixture(self):
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- return domain
-
- def _set_domain_scope(self, domain_id):
- # We only provide a domain scope if we have multiple drivers
- if CONF.identity.domain_specific_drivers_enabled:
- return domain_id
-
- def test_authenticate_bad_user(self):
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=uuid.uuid4().hex,
- password=self.user_foo['password'])
-
- def test_authenticate_bad_password(self):
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=self.user_foo['id'],
- password=uuid.uuid4().hex)
-
- def test_authenticate(self):
- user_ref = self.identity_api.authenticate(
- context={},
- user_id=self.user_sna['id'],
- password=self.user_sna['password'])
- # NOTE(termie): the password field is left in user_sna to make
- # it easier to authenticate in tests, but should
- # not be returned by the api
- self.user_sna.pop('password')
- self.user_sna['enabled'] = True
- self.assertDictEqual(self.user_sna, user_ref)
-
- def test_authenticate_and_get_roles_no_metadata(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- # Remove user id. It is ignored by create_user() and will break the
- # subset test below.
- del user['id']
-
- new_user = self.identity_api.create_user(user)
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- new_user['id'])
- user_ref = self.identity_api.authenticate(
- context={},
- user_id=new_user['id'],
- password=user['password'])
- self.assertNotIn('password', user_ref)
- # NOTE(termie): the password field is left in user_sna to make
- # it easier to authenticate in tests, but should
- # not be returned by the api
- user.pop('password')
- self.assertDictContainsSubset(user, user_ref)
- role_list = self.assignment_api.get_roles_for_user_and_project(
- new_user['id'], self.tenant_baz['id'])
- self.assertEqual(1, len(role_list))
- self.assertIn(CONF.member_role_id, role_list)
-
- def test_authenticate_if_no_password_set(self):
- id_ = uuid.uuid4().hex
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- self.identity_api.create_user(user)
-
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=id_,
- password='password')
-
- def test_create_unicode_user_name(self):
- unicode_name = u'name \u540d\u5b57'
- user = unit.new_user_ref(name=unicode_name,
- domain_id=CONF.identity.default_domain_id)
- ref = self.identity_api.create_user(user)
- self.assertEqual(unicode_name, ref['name'])
-
- def test_get_user(self):
- user_ref = self.identity_api.get_user(self.user_foo['id'])
- # NOTE(termie): the password field is left in user_foo to make
- # it easier to authenticate in tests, but should
- # not be returned by the api
- self.user_foo.pop('password')
- self.assertDictEqual(self.user_foo, user_ref)
-
- @unit.skip_if_cache_disabled('identity')
- def test_cache_layer_get_user(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- self.identity_api.create_user(user)
- ref = self.identity_api.get_user_by_name(user['name'],
- user['domain_id'])
- # cache the result.
- self.identity_api.get_user(ref['id'])
- # delete bypassing identity api
- domain_id, driver, entity_id = (
- self.identity_api._get_domain_driver_and_entity_id(ref['id']))
- driver.delete_user(entity_id)
-
- self.assertDictEqual(ref, self.identity_api.get_user(ref['id']))
- self.identity_api.get_user.invalidate(self.identity_api, ref['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user, ref['id'])
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- ref = self.identity_api.get_user_by_name(user['name'],
- user['domain_id'])
- user['description'] = uuid.uuid4().hex
- # cache the result.
- self.identity_api.get_user(ref['id'])
- # update using identity api and get back updated user.
- user_updated = self.identity_api.update_user(ref['id'], user)
- self.assertDictContainsSubset(self.identity_api.get_user(ref['id']),
- user_updated)
- self.assertDictContainsSubset(
- self.identity_api.get_user_by_name(ref['name'], ref['domain_id']),
- user_updated)
-
- def test_get_user_returns_not_found(self):
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- uuid.uuid4().hex)
-
- def test_get_user_by_name(self):
- user_ref = self.identity_api.get_user_by_name(
- self.user_foo['name'], CONF.identity.default_domain_id)
- # NOTE(termie): the password field is left in user_foo to make
- # it easier to authenticate in tests, but should
- # not be returned by the api
- self.user_foo.pop('password')
- self.assertDictEqual(self.user_foo, user_ref)
-
- @unit.skip_if_cache_disabled('identity')
- def test_cache_layer_get_user_by_name(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- self.identity_api.create_user(user)
- ref = self.identity_api.get_user_by_name(user['name'],
- user['domain_id'])
- # delete bypassing the identity api.
- domain_id, driver, entity_id = (
- self.identity_api._get_domain_driver_and_entity_id(ref['id']))
- driver.delete_user(entity_id)
-
- self.assertDictEqual(ref, self.identity_api.get_user_by_name(
- user['name'], CONF.identity.default_domain_id))
- self.identity_api.get_user_by_name.invalidate(
- self.identity_api, user['name'], CONF.identity.default_domain_id)
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user_by_name,
- user['name'], CONF.identity.default_domain_id)
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- ref = self.identity_api.get_user_by_name(user['name'],
- user['domain_id'])
- user['description'] = uuid.uuid4().hex
- user_updated = self.identity_api.update_user(ref['id'], user)
- self.assertDictContainsSubset(self.identity_api.get_user(ref['id']),
- user_updated)
- self.assertDictContainsSubset(
- self.identity_api.get_user_by_name(ref['name'], ref['domain_id']),
- user_updated)
-
- def test_get_user_by_name_returns_not_found(self):
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user_by_name,
- uuid.uuid4().hex,
- CONF.identity.default_domain_id)
-
- def test_create_duplicate_user_name_fails(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.assertRaises(exception.Conflict,
- self.identity_api.create_user,
- user)
-
- def test_create_duplicate_user_name_in_different_domains(self):
- new_domain = unit.new_domain_ref()
- self.resource_api.create_domain(new_domain['id'], new_domain)
- user1 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- user2 = unit.new_user_ref(name=user1['name'],
- domain_id=new_domain['id'])
-
- self.identity_api.create_user(user1)
- self.identity_api.create_user(user2)
-
- def test_move_user_between_domains(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- user = unit.new_user_ref(domain_id=domain1['id'])
- user = self.identity_api.create_user(user)
- user['domain_id'] = domain2['id']
- # Update the user asserting that a deprecation warning is emitted
- with mock.patch(
- 'oslo_log.versionutils.report_deprecated_feature') as mock_dep:
- self.identity_api.update_user(user['id'], user)
- self.assertTrue(mock_dep.called)
-
- updated_user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(domain2['id'], updated_user_ref['domain_id'])
-
- def test_move_user_between_domains_with_clashing_names_fails(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- # First, create a user in domain1
- user1 = unit.new_user_ref(domain_id=domain1['id'])
- user1 = self.identity_api.create_user(user1)
- # Now create a user in domain2 with a potentially clashing
- # name - which should work since we have domain separation
- user2 = unit.new_user_ref(name=user1['name'],
- domain_id=domain2['id'])
- user2 = self.identity_api.create_user(user2)
- # Now try and move user1 into the 2nd domain - which should
- # fail since the names clash
- user1['domain_id'] = domain2['id']
- self.assertRaises(exception.Conflict,
- self.identity_api.update_user,
- user1['id'],
- user1)
-
- def test_rename_duplicate_user_name_fails(self):
- user1 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user2 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- self.identity_api.create_user(user1)
- user2 = self.identity_api.create_user(user2)
- user2['name'] = user1['name']
- self.assertRaises(exception.Conflict,
- self.identity_api.update_user,
- user2['id'],
- user2)
-
- def test_update_user_id_fails(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- original_id = user['id']
- user['id'] = 'fake2'
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- original_id,
- user)
- user_ref = self.identity_api.get_user(original_id)
- self.assertEqual(original_id, user_ref['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- 'fake2')
-
- def test_delete_user_with_group_project_domain_links(self):
- role1 = unit.new_role_ref()
- self.role_api.create_role(role1['id'], role1)
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- project1 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project1['id'], project1)
- user1 = unit.new_user_ref(domain_id=domain1['id'])
- user1 = self.identity_api.create_user(user1)
- group1 = unit.new_group_ref(domain_id=domain1['id'])
- group1 = self.identity_api.create_group(group1)
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role1['id'])
- self.identity_api.add_user_to_group(user_id=user1['id'],
- group_id=group1['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertEqual(1, len(roles_ref))
- self.identity_api.check_user_in_group(
- user_id=user1['id'],
- group_id=group1['id'])
- self.identity_api.delete_user(user1['id'])
- self.assertRaises(exception.NotFound,
- self.identity_api.check_user_in_group,
- user1['id'],
- group1['id'])
-
- def test_delete_group_with_user_project_domain_links(self):
- role1 = unit.new_role_ref()
- self.role_api.create_role(role1['id'], role1)
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- project1 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project1['id'], project1)
- user1 = unit.new_user_ref(domain_id=domain1['id'])
- user1 = self.identity_api.create_user(user1)
- group1 = unit.new_group_ref(domain_id=domain1['id'])
- group1 = self.identity_api.create_group(group1)
-
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role1['id'])
- self.identity_api.add_user_to_group(user_id=user1['id'],
- group_id=group1['id'])
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertEqual(1, len(roles_ref))
- self.identity_api.check_user_in_group(
- user_id=user1['id'],
- group_id=group1['id'])
- self.identity_api.delete_group(group1['id'])
- self.identity_api.get_user(user1['id'])
-
- def test_update_user_returns_not_found(self):
- user_id = uuid.uuid4().hex
- self.assertRaises(exception.UserNotFound,
- self.identity_api.update_user,
- user_id,
- {'id': user_id,
- 'domain_id': CONF.identity.default_domain_id})
-
- def test_delete_user_returns_not_found(self):
- self.assertRaises(exception.UserNotFound,
- self.identity_api.delete_user,
- uuid.uuid4().hex)
-
- def test_create_user_long_name_fails(self):
- user = unit.new_user_ref(name='a' * 256,
- domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- def test_create_user_blank_name_fails(self):
- user = unit.new_user_ref(name='',
- domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- def test_create_user_missed_password(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.identity_api.get_user(user['id'])
- # Make sure the user is not allowed to login
- # with a password that is empty string or None
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=user['id'],
- password='')
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=user['id'],
- password=None)
-
- def test_create_user_none_password(self):
- user = unit.new_user_ref(password=None,
- domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.identity_api.get_user(user['id'])
- # Make sure the user is not allowed to login
- # with a password that is empty string or None
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=user['id'],
- password='')
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=user['id'],
- password=None)
-
- def test_create_user_invalid_name_fails(self):
- user = unit.new_user_ref(name=None,
- domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- user = unit.new_user_ref(name=123,
- domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- def test_create_user_invalid_enabled_type_string(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id,
- # invalid string value
- enabled='true')
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- def test_update_user_long_name_fails(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- user['name'] = 'a' * 256
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- def test_update_user_blank_name_fails(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- user['name'] = ''
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- def test_update_user_invalid_name_fails(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
-
- user['name'] = None
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- user['name'] = 123
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- def test_list_users(self):
- users = self.identity_api.list_users(
- domain_scope=self._set_domain_scope(
- CONF.identity.default_domain_id))
- self.assertEqual(len(default_fixtures.USERS), len(users))
- user_ids = set(user['id'] for user in users)
- expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
- for user in default_fixtures.USERS)
- for user_ref in users:
- self.assertNotIn('password', user_ref)
- self.assertEqual(expected_user_ids, user_ids)
-
- def test_list_groups(self):
- group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group2 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group1 = self.identity_api.create_group(group1)
- group2 = self.identity_api.create_group(group2)
- groups = self.identity_api.list_groups(
- domain_scope=self._set_domain_scope(
- CONF.identity.default_domain_id))
- self.assertEqual(2, len(groups))
- group_ids = []
- for group in groups:
- group_ids.append(group.get('id'))
- self.assertIn(group1['id'], group_ids)
- self.assertIn(group2['id'], group_ids)
-
- def test_create_user_doesnt_modify_passed_in_dict(self):
- new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- original_user = new_user.copy()
- self.identity_api.create_user(new_user)
- self.assertDictEqual(original_user, new_user)
-
- def test_update_user_enable(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertTrue(user_ref['enabled'])
-
- user['enabled'] = False
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(user['enabled'], user_ref['enabled'])
-
- # If not present, enabled field should not be updated
- del user['enabled']
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertFalse(user_ref['enabled'])
-
- user['enabled'] = True
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(user['enabled'], user_ref['enabled'])
-
- del user['enabled']
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertTrue(user_ref['enabled'])
-
- # Integers are valid Python's booleans. Explicitly test it.
- user['enabled'] = 0
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertFalse(user_ref['enabled'])
-
- # Any integers other than 0 are interpreted as True
- user['enabled'] = -42
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- # NOTE(breton): below, attribute `enabled` is explicitly tested to be
- # equal True. assertTrue should not be used, because it converts
- # the passed value to bool().
- self.assertIs(user_ref['enabled'], True)
-
- def test_update_user_name(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(user['name'], user_ref['name'])
-
- changed_name = user_ref['name'] + '_changed'
- user_ref['name'] = changed_name
- updated_user = self.identity_api.update_user(user_ref['id'], user_ref)
-
- # NOTE(dstanek): the SQL backend adds an 'extra' field containing a
- # dictionary of the extra fields in addition to the
- # fields in the object. For the details see:
- # SqlIdentity.test_update_project_returns_extra
- updated_user.pop('extra', None)
-
- self.assertDictEqual(user_ref, updated_user)
-
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertEqual(changed_name, user_ref['name'])
-
- def test_update_user_enable_fails(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertTrue(user_ref['enabled'])
-
- # Strings are not valid boolean values
- user['enabled'] = 'false'
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- def test_add_user_to_group(self):
- domain = self._get_domain_fixture()
- new_group = unit.new_group_ref(domain_id=domain['id'])
- new_group = self.identity_api.create_group(new_group)
- new_user = unit.new_user_ref(domain_id=domain['id'])
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- groups = self.identity_api.list_groups_for_user(new_user['id'])
-
- found = False
- for x in groups:
- if (x['id'] == new_group['id']):
- found = True
- self.assertTrue(found)
-
- def test_add_user_to_group_returns_not_found(self):
- domain = self._get_domain_fixture()
- new_user = unit.new_user_ref(domain_id=domain['id'])
- new_user = self.identity_api.create_user(new_user)
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.add_user_to_group,
- new_user['id'],
- uuid.uuid4().hex)
-
- new_group = unit.new_group_ref(domain_id=domain['id'])
- new_group = self.identity_api.create_group(new_group)
- self.assertRaises(exception.UserNotFound,
- self.identity_api.add_user_to_group,
- uuid.uuid4().hex,
- new_group['id'])
-
- self.assertRaises(exception.NotFound,
- self.identity_api.add_user_to_group,
- uuid.uuid4().hex,
- uuid.uuid4().hex)
-
- def test_check_user_in_group(self):
- domain = self._get_domain_fixture()
- new_group = unit.new_group_ref(domain_id=domain['id'])
- new_group = self.identity_api.create_group(new_group)
- new_user = unit.new_user_ref(domain_id=domain['id'])
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- self.identity_api.check_user_in_group(new_user['id'], new_group['id'])
-
- def test_check_user_not_in_group(self):
- new_group = unit.new_group_ref(
- domain_id=CONF.identity.default_domain_id)
- new_group = self.identity_api.create_group(new_group)
-
- new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- new_user = self.identity_api.create_user(new_user)
-
- self.assertRaises(exception.NotFound,
- self.identity_api.check_user_in_group,
- new_user['id'],
- new_group['id'])
-
- def test_check_user_in_group_returns_not_found(self):
- new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- new_user = self.identity_api.create_user(new_user)
-
- new_group = unit.new_group_ref(
- domain_id=CONF.identity.default_domain_id)
- new_group = self.identity_api.create_group(new_group)
-
- self.assertRaises(exception.UserNotFound,
- self.identity_api.check_user_in_group,
- uuid.uuid4().hex,
- new_group['id'])
-
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.check_user_in_group,
- new_user['id'],
- uuid.uuid4().hex)
-
- self.assertRaises(exception.NotFound,
- self.identity_api.check_user_in_group,
- uuid.uuid4().hex,
- uuid.uuid4().hex)
-
- def test_list_users_in_group(self):
- domain = self._get_domain_fixture()
- new_group = unit.new_group_ref(domain_id=domain['id'])
- new_group = self.identity_api.create_group(new_group)
- # Make sure we get an empty list back on a new group, not an error.
- user_refs = self.identity_api.list_users_in_group(new_group['id'])
- self.assertEqual([], user_refs)
- # Make sure we get the correct users back once they have been added
- # to the group.
- new_user = unit.new_user_ref(domain_id=domain['id'])
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- user_refs = self.identity_api.list_users_in_group(new_group['id'])
- found = False
- for x in user_refs:
- if (x['id'] == new_user['id']):
- found = True
- self.assertNotIn('password', x)
- self.assertTrue(found)
-
- def test_list_users_in_group_returns_not_found(self):
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.list_users_in_group,
- uuid.uuid4().hex)
-
- def test_list_groups_for_user(self):
- domain = self._get_domain_fixture()
- test_groups = []
- test_users = []
- GROUP_COUNT = 3
- USER_COUNT = 2
-
- for x in range(0, USER_COUNT):
- new_user = unit.new_user_ref(domain_id=domain['id'])
- new_user = self.identity_api.create_user(new_user)
- test_users.append(new_user)
- positive_user = test_users[0]
- negative_user = test_users[1]
-
- for x in range(0, USER_COUNT):
- group_refs = self.identity_api.list_groups_for_user(
- test_users[x]['id'])
- self.assertEqual(0, len(group_refs))
-
- for x in range(0, GROUP_COUNT):
- before_count = x
- after_count = x + 1
- new_group = unit.new_group_ref(domain_id=domain['id'])
- new_group = self.identity_api.create_group(new_group)
- test_groups.append(new_group)
-
- # add the user to the group and ensure that the
- # group count increases by one for each
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(before_count, len(group_refs))
- self.identity_api.add_user_to_group(
- positive_user['id'],
- new_group['id'])
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(after_count, len(group_refs))
-
- # Make sure the group count for the unrelated user did not change
- group_refs = self.identity_api.list_groups_for_user(
- negative_user['id'])
- self.assertEqual(0, len(group_refs))
-
- # remove the user from each group and ensure that
- # the group count reduces by one for each
- for x in range(0, 3):
- before_count = GROUP_COUNT - x
- after_count = GROUP_COUNT - x - 1
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(before_count, len(group_refs))
- self.identity_api.remove_user_from_group(
- positive_user['id'],
- test_groups[x]['id'])
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(after_count, len(group_refs))
- # Make sure the group count for the unrelated user
- # did not change
- group_refs = self.identity_api.list_groups_for_user(
- negative_user['id'])
- self.assertEqual(0, len(group_refs))
-
- def test_remove_user_from_group(self):
- domain = self._get_domain_fixture()
- new_group = unit.new_group_ref(domain_id=domain['id'])
- new_group = self.identity_api.create_group(new_group)
- new_user = unit.new_user_ref(domain_id=domain['id'])
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- groups = self.identity_api.list_groups_for_user(new_user['id'])
- self.assertIn(new_group['id'], [x['id'] for x in groups])
- self.identity_api.remove_user_from_group(new_user['id'],
- new_group['id'])
- groups = self.identity_api.list_groups_for_user(new_user['id'])
- self.assertNotIn(new_group['id'], [x['id'] for x in groups])
-
- def test_remove_user_from_group_returns_not_found(self):
- domain = self._get_domain_fixture()
- new_user = unit.new_user_ref(domain_id=domain['id'])
- new_user = self.identity_api.create_user(new_user)
- new_group = unit.new_group_ref(domain_id=domain['id'])
- new_group = self.identity_api.create_group(new_group)
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.remove_user_from_group,
- new_user['id'],
- uuid.uuid4().hex)
-
- self.assertRaises(exception.UserNotFound,
- self.identity_api.remove_user_from_group,
- uuid.uuid4().hex,
- new_group['id'])
-
- self.assertRaises(exception.NotFound,
- self.identity_api.remove_user_from_group,
- uuid.uuid4().hex,
- uuid.uuid4().hex)
-
- def test_group_crud(self):
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- group = unit.new_group_ref(domain_id=domain['id'])
- group = self.identity_api.create_group(group)
- group_ref = self.identity_api.get_group(group['id'])
- self.assertDictContainsSubset(group, group_ref)
-
- group['name'] = uuid.uuid4().hex
- self.identity_api.update_group(group['id'], group)
- group_ref = self.identity_api.get_group(group['id'])
- self.assertDictContainsSubset(group, group_ref)
-
- self.identity_api.delete_group(group['id'])
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.get_group,
- group['id'])
-
- def test_get_group_by_name(self):
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group_name = group['name']
- group = self.identity_api.create_group(group)
- spoiler = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- self.identity_api.create_group(spoiler)
-
- group_ref = self.identity_api.get_group_by_name(
- group_name, CONF.identity.default_domain_id)
- self.assertDictEqual(group, group_ref)
-
- def test_get_group_by_name_returns_not_found(self):
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.get_group_by_name,
- uuid.uuid4().hex,
- CONF.identity.default_domain_id)
-
- @unit.skip_if_cache_disabled('identity')
- def test_cache_layer_group_crud(self):
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
- # cache the result
- group_ref = self.identity_api.get_group(group['id'])
- # delete the group bypassing identity api.
- domain_id, driver, entity_id = (
- self.identity_api._get_domain_driver_and_entity_id(group['id']))
- driver.delete_group(entity_id)
-
- self.assertEqual(group_ref, self.identity_api.get_group(group['id']))
- self.identity_api.get_group.invalidate(self.identity_api, group['id'])
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.get_group, group['id'])
-
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
- # cache the result
- self.identity_api.get_group(group['id'])
- group['name'] = uuid.uuid4().hex
- group_ref = self.identity_api.update_group(group['id'], group)
- # after updating through identity api, get updated group
- self.assertDictContainsSubset(self.identity_api.get_group(group['id']),
- group_ref)
-
- def test_create_duplicate_group_name_fails(self):
- group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group2 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id,
- name=group1['name'])
- group1 = self.identity_api.create_group(group1)
- self.assertRaises(exception.Conflict,
- self.identity_api.create_group,
- group2)
-
- def test_create_duplicate_group_name_in_different_domains(self):
- new_domain = unit.new_domain_ref()
- self.resource_api.create_domain(new_domain['id'], new_domain)
- group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group2 = unit.new_group_ref(domain_id=new_domain['id'],
- name=group1['name'])
- group1 = self.identity_api.create_group(group1)
- group2 = self.identity_api.create_group(group2)
-
- def test_move_group_between_domains(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- group = unit.new_group_ref(domain_id=domain1['id'])
- group = self.identity_api.create_group(group)
- group['domain_id'] = domain2['id']
- # Update the group asserting that a deprecation warning is emitted
- with mock.patch(
- 'oslo_log.versionutils.report_deprecated_feature') as mock_dep:
- self.identity_api.update_group(group['id'], group)
- self.assertTrue(mock_dep.called)
-
- updated_group_ref = self.identity_api.get_group(group['id'])
- self.assertEqual(domain2['id'], updated_group_ref['domain_id'])
-
- def test_move_group_between_domains_with_clashing_names_fails(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- # First, create a group in domain1
- group1 = unit.new_group_ref(domain_id=domain1['id'])
- group1 = self.identity_api.create_group(group1)
- # Now create a group in domain2 with a potentially clashing
- # name - which should work since we have domain separation
- group2 = unit.new_group_ref(name=group1['name'],
- domain_id=domain2['id'])
- group2 = self.identity_api.create_group(group2)
- # Now try and move group1 into the 2nd domain - which should
- # fail since the names clash
- group1['domain_id'] = domain2['id']
- self.assertRaises(exception.Conflict,
- self.identity_api.update_group,
- group1['id'],
- group1)
-
- def test_user_crud(self):
- user_dict = unit.new_user_ref(
- domain_id=CONF.identity.default_domain_id)
- del user_dict['id']
- user = self.identity_api.create_user(user_dict)
- user_ref = self.identity_api.get_user(user['id'])
- del user_dict['password']
- user_ref_dict = {x: user_ref[x] for x in user_ref}
- self.assertDictContainsSubset(user_dict, user_ref_dict)
-
- user_dict['password'] = uuid.uuid4().hex
- self.identity_api.update_user(user['id'], user_dict)
- user_ref = self.identity_api.get_user(user['id'])
- del user_dict['password']
- user_ref_dict = {x: user_ref[x] for x in user_ref}
- self.assertDictContainsSubset(user_dict, user_ref_dict)
-
- self.identity_api.delete_user(user['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- user['id'])
-
- def test_arbitrary_attributes_are_returned_from_create_user(self):
- attr_value = uuid.uuid4().hex
- user_data = unit.new_user_ref(
- domain_id=CONF.identity.default_domain_id,
- arbitrary_attr=attr_value)
-
- user = self.identity_api.create_user(user_data)
-
- self.assertEqual(attr_value, user['arbitrary_attr'])
-
- def test_arbitrary_attributes_are_returned_from_get_user(self):
- attr_value = uuid.uuid4().hex
- user_data = unit.new_user_ref(
- domain_id=CONF.identity.default_domain_id,
- arbitrary_attr=attr_value)
-
- user_data = self.identity_api.create_user(user_data)
-
- user = self.identity_api.get_user(user_data['id'])
- self.assertEqual(attr_value, user['arbitrary_attr'])
-
- def test_new_arbitrary_attributes_are_returned_from_update_user(self):
- user_data = unit.new_user_ref(
- domain_id=CONF.identity.default_domain_id)
-
- user = self.identity_api.create_user(user_data)
- attr_value = uuid.uuid4().hex
- user['arbitrary_attr'] = attr_value
- updated_user = self.identity_api.update_user(user['id'], user)
-
- self.assertEqual(attr_value, updated_user['arbitrary_attr'])
-
- def test_updated_arbitrary_attributes_are_returned_from_update_user(self):
- attr_value = uuid.uuid4().hex
- user_data = unit.new_user_ref(
- domain_id=CONF.identity.default_domain_id,
- arbitrary_attr=attr_value)
-
- new_attr_value = uuid.uuid4().hex
- user = self.identity_api.create_user(user_data)
- user['arbitrary_attr'] = new_attr_value
- updated_user = self.identity_api.update_user(user['id'], user)
-
- self.assertEqual(new_attr_value, updated_user['arbitrary_attr'])
-
- def test_user_update_and_user_get_return_same_response(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- user = self.identity_api.create_user(user)
-
- updated_user = {'enabled': False}
- updated_user_ref = self.identity_api.update_user(
- user['id'], updated_user)
-
- # SQL backend adds 'extra' field
- updated_user_ref.pop('extra', None)
-
- self.assertIs(False, updated_user_ref['enabled'])
-
- user_ref = self.identity_api.get_user(user['id'])
- self.assertDictEqual(updated_user_ref, user_ref)
-
-
-class FilterTests(filtering.FilterTests):
- def test_list_entities_filtered(self):
- for entity in ['user', 'group', 'project']:
- # Create 20 entities
- entity_list = self._create_test_data(entity, 20)
-
- # Try filtering to get one an exact item out of the list
- hints = driver_hints.Hints()
- hints.add_filter('name', entity_list[10]['name'])
- entities = self._list_entities(entity)(hints=hints)
- self.assertEqual(1, len(entities))
- self.assertEqual(entity_list[10]['id'], entities[0]['id'])
- # Check the driver has removed the filter from the list hints
- self.assertFalse(hints.get_exact_filter_by_name('name'))
- self._delete_test_data(entity, entity_list)
-
- def test_list_users_inexact_filtered(self):
- # Create 20 users, some with specific names. We set the names at create
- # time (rather than updating them), since the LDAP driver does not
- # support name updates.
- user_name_data = {
- # user index: name for user
- 5: 'The',
- 6: 'The Ministry',
- 7: 'The Ministry of',
- 8: 'The Ministry of Silly',
- 9: 'The Ministry of Silly Walks',
- # ...and one for useful case insensitivity testing
- 10: 'The ministry of silly walks OF'
- }
- user_list = self._create_test_data(
- 'user', 20, domain_id=CONF.identity.default_domain_id,
- name_dict=user_name_data)
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'ministry', comparator='contains')
- users = self.identity_api.list_users(hints=hints)
- self.assertEqual(5, len(users))
- self._match_with_list(users, user_list,
- list_start=6, list_end=11)
- # TODO(henry-nash) Check inexact filter has been removed.
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'The', comparator='startswith')
- users = self.identity_api.list_users(hints=hints)
- self.assertEqual(6, len(users))
- self._match_with_list(users, user_list,
- list_start=5, list_end=11)
- # TODO(henry-nash) Check inexact filter has been removed.
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'of', comparator='endswith')
- users = self.identity_api.list_users(hints=hints)
- self.assertEqual(2, len(users))
- # We can't assume we will get back the users in any particular order
- self.assertIn(user_list[7]['id'], [users[0]['id'], users[1]['id']])
- self.assertIn(user_list[10]['id'], [users[0]['id'], users[1]['id']])
- # TODO(henry-nash) Check inexact filter has been removed.
-
- # TODO(henry-nash): Add some case sensitive tests. However,
- # these would be hard to validate currently, since:
- #
- # For SQL, the issue is that MySQL 0.7, by default, is installed in
- # case insensitive mode (which is what is run by default for our
- # SQL backend tests). For production deployments. OpenStack
- # assumes a case sensitive database. For these tests, therefore, we
- # need to be able to check the sensitivity of the database so as to
- # know whether to run case sensitive tests here.
- #
- # For LDAP/AD, although dependent on the schema being used, attributes
- # are typically configured to be case aware, but not case sensitive.
-
- self._delete_test_data('user', user_list)
-
- def _groups_for_user_data(self):
- number_of_groups = 10
- group_name_data = {
- # entity index: name for entity
- 5: 'The',
- 6: 'The Ministry',
- 9: 'The Ministry of Silly Walks',
- }
- group_list = self._create_test_data(
- 'group', number_of_groups,
- domain_id=CONF.identity.default_domain_id,
- name_dict=group_name_data)
- user_list = self._create_test_data('user', 2)
-
- for group in range(7):
- # Create membership, including with two out of the three groups
- # with well know names
- self.identity_api.add_user_to_group(user_list[0]['id'],
- group_list[group]['id'])
- # ...and some spoiler memberships
- for group in range(7, number_of_groups):
- self.identity_api.add_user_to_group(user_list[1]['id'],
- group_list[group]['id'])
-
- return group_list, user_list
-
- def test_groups_for_user_inexact_filtered(self):
- """Test use of filtering doesn't break groups_for_user listing.
-
- Some backends may use filtering to achieve the list of groups for a
- user, so test that it can combine a second filter.
-
- Test Plan:
-
- - Create 10 groups, some with names we can filter on
- - Create 2 users
- - Assign 1 of those users to most of the groups, including some of the
- well known named ones
- - Assign the other user to other groups as spoilers
- - Ensure that when we list groups for users with a filter on the group
- name, both restrictions have been enforced on what is returned.
-
- """
- group_list, user_list = self._groups_for_user_data()
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'Ministry', comparator='contains')
- groups = self.identity_api.list_groups_for_user(
- user_list[0]['id'], hints=hints)
- # We should only get back one group, since of the two that contain
- # 'Ministry' the user only belongs to one.
- self.assertThat(len(groups), matchers.Equals(1))
- self.assertEqual(group_list[6]['id'], groups[0]['id'])
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'The', comparator='startswith')
- groups = self.identity_api.list_groups_for_user(
- user_list[0]['id'], hints=hints)
- # We should only get back 2 out of the 3 groups that start with 'The'
- # hence showing that both "filters" have been applied
- self.assertThat(len(groups), matchers.Equals(2))
- self.assertIn(group_list[5]['id'], [groups[0]['id'], groups[1]['id']])
- self.assertIn(group_list[6]['id'], [groups[0]['id'], groups[1]['id']])
-
- hints.add_filter('name', 'The', comparator='endswith')
- groups = self.identity_api.list_groups_for_user(
- user_list[0]['id'], hints=hints)
- # We should only get back one group since it is the only one that
- # ends with 'The'
- self.assertThat(len(groups), matchers.Equals(1))
- self.assertEqual(group_list[5]['id'], groups[0]['id'])
-
- self._delete_test_data('user', user_list)
- self._delete_test_data('group', group_list)
-
- def test_groups_for_user_exact_filtered(self):
- """Test exact filters doesn't break groups_for_user listing."""
- group_list, user_list = self._groups_for_user_data()
- hints = driver_hints.Hints()
- hints.add_filter('name', 'The Ministry', comparator='equals')
- groups = self.identity_api.list_groups_for_user(
- user_list[0]['id'], hints=hints)
- # We should only get back 1 out of the 3 groups with name 'The
- # Ministry' hence showing that both "filters" have been applied.
- self.assertEqual(1, len(groups))
- self.assertEqual(group_list[6]['id'], groups[0]['id'])
- self._delete_test_data('user', user_list)
- self._delete_test_data('group', group_list)
-
- def _get_user_name_field_size(self):
- """Return the size of the user name field for the backend.
-
- Subclasses can override this method to indicate that the user name
- field is limited in length. The user name is the field used in the test
- that validates that a filter value works even if it's longer than a
- field.
-
- If the backend doesn't limit the value length then return None.
-
- """
- return None
-
- def test_filter_value_wider_than_field(self):
- # If a filter value is given that's larger than the field in the
- # backend then no values are returned.
-
- user_name_field_size = self._get_user_name_field_size()
-
- if user_name_field_size is None:
- # The backend doesn't limit the size of the user name, so pass this
- # test.
- return
-
- # Create some users just to make sure would return something if the
- # filter was ignored.
- self._create_test_data('user', 2)
-
- hints = driver_hints.Hints()
- value = 'A' * (user_name_field_size + 1)
- hints.add_filter('name', value)
- users = self.identity_api.list_users(hints=hints)
- self.assertEqual([], users)
-
- def _list_users_in_group_data(self):
- number_of_users = 10
- user_name_data = {
- 1: 'Arthur Conan Doyle',
- 3: 'Arthur Rimbaud',
- 9: 'Arthur Schopenhauer',
- }
- user_list = self._create_test_data(
- 'user', number_of_users,
- domain_id=CONF.identity.default_domain_id,
- name_dict=user_name_data)
- group = self._create_one_entity(
- 'group', CONF.identity.default_domain_id, 'Great Writers')
- for i in range(7):
- self.identity_api.add_user_to_group(user_list[i]['id'],
- group['id'])
-
- return user_list, group
-
- def test_list_users_in_group_inexact_filtered(self):
- user_list, group = self._list_users_in_group_data()
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'Arthur', comparator='contains')
- users = self.identity_api.list_users_in_group(group['id'], hints=hints)
- self.assertThat(len(users), matchers.Equals(2))
- self.assertIn(user_list[1]['id'], [users[0]['id'], users[1]['id']])
- self.assertIn(user_list[3]['id'], [users[0]['id'], users[1]['id']])
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'Arthur', comparator='startswith')
- users = self.identity_api.list_users_in_group(group['id'], hints=hints)
- self.assertThat(len(users), matchers.Equals(2))
- self.assertIn(user_list[1]['id'], [users[0]['id'], users[1]['id']])
- self.assertIn(user_list[3]['id'], [users[0]['id'], users[1]['id']])
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'Doyle', comparator='endswith')
- users = self.identity_api.list_users_in_group(group['id'], hints=hints)
- self.assertThat(len(users), matchers.Equals(1))
- self.assertEqual(user_list[1]['id'], users[0]['id'])
-
- self._delete_test_data('user', user_list)
- self._delete_entity('group')(group['id'])
-
- def test_list_users_in_group_exact_filtered(self):
- hints = driver_hints.Hints()
- user_list, group = self._list_users_in_group_data()
- hints.add_filter('name', 'Arthur Rimbaud', comparator='equals')
- users = self.identity_api.list_users_in_group(group['id'], hints=hints)
- self.assertEqual(1, len(users))
- self.assertEqual(user_list[3]['id'], users[0]['id'])
- self._delete_test_data('user', user_list)
- self._delete_entity('group')(group['id'])
-
-
-class LimitTests(filtering.FilterTests):
- ENTITIES = ['user', 'group', 'project']
-
- def setUp(self):
- """Setup for Limit Test Cases."""
- self.entity_lists = {}
-
- for entity in self.ENTITIES:
- # Create 20 entities
- self.entity_lists[entity] = self._create_test_data(entity, 20)
- self.addCleanup(self.clean_up_entities)
-
- def clean_up_entities(self):
- """Clean up entity test data from Limit Test Cases."""
- for entity in self.ENTITIES:
- self._delete_test_data(entity, self.entity_lists[entity])
- del self.entity_lists
-
- def _test_list_entity_filtered_and_limited(self, entity):
- self.config_fixture.config(list_limit=10)
- # Should get back just 10 entities
- hints = driver_hints.Hints()
- entities = self._list_entities(entity)(hints=hints)
- self.assertEqual(hints.limit['limit'], len(entities))
- self.assertTrue(hints.limit['truncated'])
-
- # Override with driver specific limit
- if entity == 'project':
- self.config_fixture.config(group='resource', list_limit=5)
- else:
- self.config_fixture.config(group='identity', list_limit=5)
-
- # Should get back just 5 users
- hints = driver_hints.Hints()
- entities = self._list_entities(entity)(hints=hints)
- self.assertEqual(hints.limit['limit'], len(entities))
-
- # Finally, let's pretend we want to get the full list of entities,
- # even with the limits set, as part of some internal calculation.
- # Calling the API without a hints list should achieve this, and
- # return at least the 20 entries we created (there may be other
- # entities lying around created by other tests/setup).
- entities = self._list_entities(entity)()
- self.assertTrue(len(entities) >= 20)
- self._match_with_list(self.entity_lists[entity], entities)
-
- def test_list_users_filtered_and_limited(self):
- self._test_list_entity_filtered_and_limited('user')
-
- def test_list_groups_filtered_and_limited(self):
- self._test_list_entity_filtered_and_limited('group')
-
- def test_list_projects_filtered_and_limited(self):
- self._test_list_entity_filtered_and_limited('project')
diff --git a/keystone-moon/keystone/tests/unit/identity/test_controllers.py b/keystone-moon/keystone/tests/unit/identity/test_controllers.py
deleted file mode 100644
index ed2fe3ff..00000000
--- a/keystone-moon/keystone/tests/unit/identity/test_controllers.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 2016 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from oslo_config import cfg
-
-from keystone import exception
-from keystone.identity import controllers
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-
-
-CONF = cfg.CONF
-
-_ADMIN_CONTEXT = {'is_admin': True, 'query_string': {}}
-
-
-class UserTestCaseNoDefaultDomain(unit.TestCase):
-
- def setUp(self):
- super(UserTestCaseNoDefaultDomain, self).setUp()
- self.useFixture(database.Database())
- self.load_backends()
- self.user_controller = controllers.User()
-
- def test_setup(self):
- # Other tests in this class assume there's no default domain, so make
- # sure the setUp worked as expected.
- self.assertRaises(
- exception.DomainNotFound,
- self.resource_api.get_domain, CONF.identity.default_domain_id)
-
- def test_get_users(self):
- # When list_users is done and there's no default domain, the result is
- # an empty list.
- res = self.user_controller.get_users(_ADMIN_CONTEXT)
- self.assertEqual([], res['users'])
-
- def test_get_user_by_name(self):
- # When get_user_by_name is done and there's no default domain, the
- # result is 404 Not Found
- user_name = uuid.uuid4().hex
- self.assertRaises(
- exception.UserNotFound,
- self.user_controller.get_user_by_name, _ADMIN_CONTEXT, user_name)
-
- def test_create_user(self):
- # When a user is created using the v2 controller and there's no default
- # domain, it doesn't fail with can't find domain (a default domain is
- # created)
- user = {'name': uuid.uuid4().hex}
- self.user_controller.create_user(_ADMIN_CONTEXT, user)
- # If the above doesn't fail then this is successful.
diff --git a/keystone-moon/keystone/tests/unit/identity/test_core.py b/keystone-moon/keystone/tests/unit/identity/test_core.py
deleted file mode 100644
index 39f3c701..00000000
--- a/keystone-moon/keystone/tests/unit/identity/test_core.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Unit tests for core identity behavior."""
-
-import itertools
-import os
-import uuid
-
-import mock
-from oslo_config import cfg
-from oslo_config import fixture as config_fixture
-
-from keystone import exception
-from keystone import identity
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-
-
-CONF = cfg.CONF
-
-
-class TestDomainConfigs(unit.BaseTestCase):
-
- def setUp(self):
- super(TestDomainConfigs, self).setUp()
- self.addCleanup(CONF.reset)
-
- self.tmp_dir = unit.dirs.tmp()
-
- self.config_fixture = self.useFixture(config_fixture.Config(CONF))
- self.config_fixture.config(domain_config_dir=self.tmp_dir,
- group='identity')
-
- def test_config_for_nonexistent_domain(self):
- """Having a config for a non-existent domain will be ignored.
-
- There are no assertions in this test because there are no side
- effects. If there is a config file for a domain that does not
- exist it should be ignored.
-
- """
- domain_id = uuid.uuid4().hex
- domain_config_filename = os.path.join(self.tmp_dir,
- 'keystone.%s.conf' % domain_id)
- self.addCleanup(lambda: os.remove(domain_config_filename))
- with open(domain_config_filename, 'w'):
- """Write an empty config file."""
-
- e = exception.DomainNotFound(domain_id=domain_id)
- mock_assignment_api = mock.Mock()
- mock_assignment_api.get_domain_by_name.side_effect = e
-
- domain_config = identity.DomainConfigs()
- fake_standard_driver = None
- domain_config.setup_domain_drivers(fake_standard_driver,
- mock_assignment_api)
-
- def test_config_for_dot_name_domain(self):
- # Ensure we can get the right domain name which has dots within it
- # from filename.
- domain_config_filename = os.path.join(self.tmp_dir,
- 'keystone.abc.def.com.conf')
- with open(domain_config_filename, 'w'):
- """Write an empty config file."""
- self.addCleanup(os.remove, domain_config_filename)
-
- with mock.patch.object(identity.DomainConfigs,
- '_load_config_from_file') as mock_load_config:
- domain_config = identity.DomainConfigs()
- fake_assignment_api = None
- fake_standard_driver = None
- domain_config.setup_domain_drivers(fake_standard_driver,
- fake_assignment_api)
- mock_load_config.assert_called_once_with(fake_assignment_api,
- [domain_config_filename],
- 'abc.def.com')
-
- def test_config_for_multiple_sql_backend(self):
- domains_config = identity.DomainConfigs()
-
- # Create the right sequence of is_sql in the drivers being
- # requested to expose the bug, which is that a False setting
- # means it forgets previous True settings.
- drivers = []
- files = []
- for idx, is_sql in enumerate((True, False, True)):
- drv = mock.Mock(is_sql=is_sql)
- drivers.append(drv)
- name = 'dummy.{0}'.format(idx)
- files.append(''.join((
- identity.DOMAIN_CONF_FHEAD,
- name,
- identity.DOMAIN_CONF_FTAIL)))
-
- walk_fake = lambda *a, **kwa: (
- ('/fake/keystone/domains/config', [], files), )
-
- generic_driver = mock.Mock(is_sql=False)
-
- assignment_api = mock.Mock()
- id_factory = itertools.count()
- assignment_api.get_domain_by_name.side_effect = (
- lambda name: {'id': next(id_factory), '_': 'fake_domain'})
- load_driver_mock = mock.Mock(side_effect=drivers)
-
- with mock.patch.object(os, 'walk', walk_fake):
- with mock.patch.object(identity.cfg, 'ConfigOpts'):
- with mock.patch.object(domains_config, '_load_driver',
- load_driver_mock):
- self.assertRaises(
- exception.MultipleSQLDriversInConfig,
- domains_config.setup_domain_drivers,
- generic_driver, assignment_api)
-
- self.assertEqual(3, load_driver_mock.call_count)
-
-
-class TestDatabaseDomainConfigs(unit.TestCase):
-
- def setUp(self):
- super(TestDatabaseDomainConfigs, self).setUp()
- self.useFixture(database.Database())
- self.load_backends()
-
- def test_domain_config_in_database_disabled_by_default(self):
- self.assertFalse(CONF.identity.domain_configurations_from_database)
-
- def test_loading_config_from_database(self):
- self.config_fixture.config(domain_configurations_from_database=True,
- group='identity')
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- # Override two config options for our domain
- conf = {'ldap': {'url': uuid.uuid4().hex,
- 'suffix': uuid.uuid4().hex,
- 'use_tls': 'True'},
- 'identity': {
- 'driver': 'ldap'}}
- self.domain_config_api.create_config(domain['id'], conf)
- fake_standard_driver = None
- domain_config = identity.DomainConfigs()
- domain_config.setup_domain_drivers(fake_standard_driver,
- self.resource_api)
- # Make sure our two overrides are in place, and others are not affected
- res = domain_config.get_domain_conf(domain['id'])
- self.assertEqual(conf['ldap']['url'], res.ldap.url)
- self.assertEqual(conf['ldap']['suffix'], res.ldap.suffix)
- self.assertEqual(CONF.ldap.query_scope, res.ldap.query_scope)
-
- # Make sure the override is not changing the type of the config value
- use_tls_type = type(CONF.ldap.use_tls)
- self.assertEqual(use_tls_type(conf['ldap']['use_tls']),
- res.ldap.use_tls)
-
- # Now turn off using database domain configuration and check that the
- # default config file values are now seen instead of the overrides.
- CONF.set_override('domain_configurations_from_database', False,
- 'identity', enforce_type=True)
- domain_config = identity.DomainConfigs()
- domain_config.setup_domain_drivers(fake_standard_driver,
- self.resource_api)
- res = domain_config.get_domain_conf(domain['id'])
- self.assertEqual(CONF.ldap.url, res.ldap.url)
- self.assertEqual(CONF.ldap.suffix, res.ldap.suffix)
- self.assertEqual(CONF.ldap.use_tls, res.ldap.use_tls)
- self.assertEqual(CONF.ldap.query_scope, res.ldap.query_scope)
diff --git a/keystone-moon/keystone/tests/unit/identity_mapping.py b/keystone-moon/keystone/tests/unit/identity_mapping.py
deleted file mode 100644
index 4ba4f0c2..00000000
--- a/keystone-moon/keystone/tests/unit/identity_mapping.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from keystone.common import sql
-from keystone.identity.mapping_backends import sql as mapping_sql
-
-
-def list_id_mappings():
- """List all id_mappings for testing purposes."""
- with sql.session_for_read() as session:
- refs = session.query(mapping_sql.IDMapping).all()
- return [x.to_dict() for x in refs]
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py b/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py
deleted file mode 100644
index 4b914752..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from keystone.tests.unit.ksfixtures.auth_plugins import ConfigAuthPlugins # noqa
-from keystone.tests.unit.ksfixtures.cache import Cache # noqa
-from keystone.tests.unit.ksfixtures.key_repository import KeyRepository # noqa
-from keystone.tests.unit.ksfixtures.policy import Policy # noqa
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py b/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py
deleted file mode 100644
index a23b804f..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-import fixtures
-from oslo_config import cfg
-from paste import deploy
-
-from keystone.common import environment
-
-
-CONF = cfg.CONF
-
-MAIN = 'main'
-ADMIN = 'admin'
-
-
-class AppServer(fixtures.Fixture):
- """A fixture for managing an application server instance."""
-
- def __init__(self, config, name, cert=None, key=None, ca=None,
- cert_required=False, host='127.0.0.1', port=0):
- super(AppServer, self).__init__()
- self.config = config
- self.name = name
- self.cert = cert
- self.key = key
- self.ca = ca
- self.cert_required = cert_required
- self.host = host
- self.port = port
-
- def setUp(self):
- super(AppServer, self).setUp()
-
- app = deploy.loadapp(self.config, name=self.name)
- self.server = environment.Server(app, self.host, self.port)
- self._setup_SSL_if_requested()
- self.server.start(key='socket')
-
- # some tests need to know the port we ran on.
- self.port = self.server.socket_info['socket'][1]
- self._update_config_opt()
-
- self.addCleanup(self.server.stop)
-
- def _setup_SSL_if_requested(self):
- # TODO(dstanek): fix environment.Server to take a SSLOpts instance
- # so that the params are either always set or not
- if (self.cert is not None and
- self.ca is not None and
- self.key is not None):
- self.server.set_ssl(certfile=self.cert,
- keyfile=self.key,
- ca_certs=self.ca,
- cert_required=self.cert_required)
-
- def _update_config_opt(self):
- """Updates the config with the actual port used."""
- opt_name = self._get_config_option_for_section_name()
- CONF.set_override(opt_name, self.port, group='eventlet_server',
- enforce_type=True)
-
- def _get_config_option_for_section_name(self):
- """Maps Paster config section names to port option names."""
- return {'admin': 'admin_port', 'main': 'public_port'}[self.name]
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py b/keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py
deleted file mode 100644
index 68ba6f3a..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import fixtures
-
-from keystone.common import config as common_cfg
-
-
-class ConfigAuthPlugins(fixtures.Fixture):
- """A fixture for setting up and tearing down a auth plugins."""
-
- def __init__(self, config_fixture, methods, **method_classes):
- super(ConfigAuthPlugins, self).__init__()
- self.methods = methods
- self.config_fixture = config_fixture
- self.method_classes = method_classes
-
- def setUp(self):
- super(ConfigAuthPlugins, self).setUp()
- if self.methods:
- self.config_fixture.config(group='auth', methods=self.methods)
- common_cfg.setup_authentication()
- if self.method_classes:
- self.config_fixture.config(group='auth', **self.method_classes)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/cache.py b/keystone-moon/keystone/tests/unit/ksfixtures/cache.py
deleted file mode 100644
index e0833ae2..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/cache.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import fixtures
-
-from keystone import catalog
-from keystone.common import cache
-
-
-CACHE_REGIONS = (cache.CACHE_REGION, catalog.COMPUTED_CATALOG_REGION)
-
-
-class Cache(fixtures.Fixture):
- """A fixture for setting up the cache between test cases.
-
- This will also tear down an existing cache if one is already configured.
- """
-
- def setUp(self):
- super(Cache, self).setUp()
-
- # NOTE(dstanek): We must remove the existing cache backend in the
- # setUp instead of the tearDown because it defaults to a no-op cache
- # and we want the configure call below to create the correct backend.
-
- # NOTE(morganfainberg): The only way to reconfigure the CacheRegion
- # object on each setUp() call is to remove the .backend property.
- for region in CACHE_REGIONS:
- if region.is_configured:
- del region.backend
-
- # ensure the cache region instance is setup
- cache.configure_cache(region=region)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/database.py b/keystone-moon/keystone/tests/unit/ksfixtures/database.py
deleted file mode 100644
index 52c35cee..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/database.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import os
-
-import fixtures
-from oslo_config import cfg
-from oslo_db import options as db_options
-
-from keystone.common import sql
-from keystone.tests import unit
-
-
-CONF = cfg.CONF
-
-
-def run_once(f):
- """A decorator to ensure the decorated function is only executed once.
-
- The decorated function is assumed to have a one parameter.
-
- """
- @functools.wraps(f)
- def wrapper(one):
- if not wrapper.already_ran:
- f(one)
- wrapper.already_ran = True
- wrapper.already_ran = False
- return wrapper
-
-
-# NOTE(I159): Every execution all the options will be cleared. The method must
-# be called at the every fixture initialization.
-def initialize_sql_session():
- # Make sure the DB is located in the correct location, in this case set
- # the default value, as this should be able to be overridden in some
- # test cases.
- db_options.set_defaults(
- CONF,
- connection=unit.IN_MEM_DB_CONN_STRING)
-
-
-@run_once
-def _load_sqlalchemy_models(version_specifiers):
- """Find all modules containing SQLAlchemy models and import them.
-
- This creates more consistent, deterministic test runs because tables
- for all core and extension models are always created in the test
- database. We ensure this by importing all modules that contain model
- definitions.
-
- The database schema during test runs is created using reflection.
- Reflection is simply SQLAlchemy taking the model definitions for
- all models currently imported and making tables for each of them.
- The database schema created during test runs may vary between tests
- as more models are imported. Importing all models at the start of
- the test run avoids this problem.
-
- version_specifiers is a dict that contains any specific driver versions
- that have been requested. The dict is of the form:
-
- {<module_name> : {'versioned_backend' : <name of backend requested>,
- 'versionless_backend' : <name of default backend>}
- }
-
- For example:
-
- {'keystone.assignment': {'versioned_backend' : 'V8_backends',
- 'versionless_backend' : 'backends'},
- 'keystone.identity': {'versioned_backend' : 'V9_backends',
- 'versionless_backend' : 'backends'}
- }
-
- The version_specifiers will be used to load the correct driver. The
- algorithm for this assumes that versioned drivers begin in 'V'.
-
- """
- keystone_root = os.path.normpath(os.path.join(
- os.path.dirname(__file__), '..', '..', '..'))
- for root, dirs, files in os.walk(keystone_root):
- # NOTE(morganfainberg): Slice the keystone_root off the root to ensure
- # we do not end up with a module name like:
- # Users.home.openstack.keystone.assignment.backends.sql
- root = root[len(keystone_root):]
- if root.endswith('backends') and 'sql.py' in files:
- # The root will be prefixed with an instance of os.sep, which will
- # make the root after replacement '.<root>', the 'keystone' part
- # of the module path is always added to the front
- module_root = ('keystone.%s' %
- root.replace(os.sep, '.').lstrip('.'))
- module_components = module_root.split('.')
- module_without_backends = ''
- for x in range(0, len(module_components) - 1):
- module_without_backends += module_components[x] + '.'
- module_without_backends = module_without_backends.rstrip('.')
- this_backend = module_components[len(module_components) - 1]
-
- # At this point module_without_backends might be something like
- # 'keystone.assignment', while this_backend might be something
- # 'V8_backends'.
-
- if module_without_backends.startswith('keystone.contrib'):
- # All the sql modules have now been moved into the core tree
- # so no point in loading these again here (and, in fact, doing
- # so might break trying to load a versioned driver.
- continue
-
- if module_without_backends in version_specifiers:
- # OK, so there is a request for a specific version of this one.
- # We therefore should skip any other versioned backend as well
- # as the non-versioned one.
- version = version_specifiers[module_without_backends]
- if ((this_backend != version['versioned_backend'] and
- this_backend.startswith('V')) or
- this_backend == version['versionless_backend']):
- continue
- else:
- # No versioned driver requested, so ignore any that are
- # versioned
- if this_backend.startswith('V'):
- continue
-
- module_name = module_root + '.sql'
- __import__(module_name)
-
-
-class Database(fixtures.Fixture):
- """A fixture for setting up and tearing down a database."""
-
- def __init__(self, version_specifiers=None):
- super(Database, self).__init__()
- initialize_sql_session()
- if version_specifiers is None:
- version_specifiers = {}
- _load_sqlalchemy_models(version_specifiers)
-
- def setUp(self):
- super(Database, self).setUp()
-
- with sql.session_for_write() as session:
- self.engine = session.get_bind()
- self.addCleanup(sql.cleanup)
- sql.ModelBase.metadata.create_all(bind=self.engine)
- self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
-
- def recreate(self):
- sql.ModelBase.metadata.create_all(bind=self.engine)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py b/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
deleted file mode 100644
index 9977b206..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
+++ /dev/null
@@ -1,417 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# NOTE(morganfainberg) This file shouldn't have flake8 run on it as it has
-# code examples that will fail normal CI pep8/flake8 tests. This is expected.
-# The code has been moved here to ensure that proper tests occur on the
-# test_hacking_checks test cases.
-# flake8: noqa
-
-import fixtures
-
-
-class HackingCode(fixtures.Fixture):
- """A fixture to house the various code examples for the keystone hacking
- style checks.
- """
-
- mutable_default_args = {
- 'code': """
- def f():
- pass
-
- def f(a, b='', c=None):
- pass
-
- def f(bad=[]):
- pass
-
- def f(foo, bad=[], more_bad=[x for x in range(3)]):
- pass
-
- def f(foo, bad={}):
- pass
-
- def f(foo, bad={}, another_bad=[], fine=None):
- pass
-
- def f(bad=[]): # noqa
- pass
-
- def funcs(bad=dict(), more_bad=list(), even_more_bad=set()):
- "creating mutables through builtins"
-
- def funcs(bad=something(), more_bad=some_object.something()):
- "defaults from any functions"
-
- def f(bad=set(), more_bad={x for x in range(3)},
- even_more_bad={1, 2, 3}):
- "set and set comprehession"
-
- def f(bad={x: x for x in range(3)}):
- "dict comprehension"
- """,
- 'expected_errors': [
- (7, 10, 'K001'),
- (10, 15, 'K001'),
- (10, 29, 'K001'),
- (13, 15, 'K001'),
- (16, 15, 'K001'),
- (16, 31, 'K001'),
- (22, 14, 'K001'),
- (22, 31, 'K001'),
- (22, 53, 'K001'),
- (25, 14, 'K001'),
- (25, 36, 'K001'),
- (28, 10, 'K001'),
- (28, 27, 'K001'),
- (29, 21, 'K001'),
- (32, 11, 'K001'),
- ]}
-
- comments_begin_with_space = {
- 'code': """
- # This is a good comment
-
- #This is a bad one
-
- # This is alright and can
- # be continued with extra indentation
- # if that's what the developer wants.
- """,
- 'expected_errors': [
- (3, 0, 'K002'),
- ]}
-
- asserting_none_equality = {
- 'code': """
- class Test(object):
-
- def test(self):
- self.assertEqual('', '')
- self.assertEqual('', None)
- self.assertEqual(None, '')
- self.assertNotEqual('', None)
- self.assertNotEqual(None, '')
- self.assertNotEqual('', None) # noqa
- self.assertNotEqual(None, '') # noqa
- """,
- 'expected_errors': [
- (5, 8, 'K003'),
- (6, 8, 'K003'),
- (7, 8, 'K004'),
- (8, 8, 'K004'),
- ]}
-
- dict_constructor = {
- 'code': """
- lower_res = {k.lower(): v for k, v in six.iteritems(res[1])}
- fool = dict(a='a', b='b')
- lower_res = dict((k.lower(), v) for k, v in six.iteritems(res[1]))
- attrs = dict([(k, _from_json(v))])
- dict([[i,i] for i in range(3)])
- dict(({1:2}))
- """,
- 'expected_errors': [
- (3, 0, 'K008'),
- (4, 0, 'K008'),
- (5, 0, 'K008'),
- ]}
-
-
-class HackingLogging(fixtures.Fixture):
-
- shared_imports = """
- import logging
- import logging as stlib_logging
- from keystone.i18n import _
- from keystone.i18n import _ as oslo_i18n
- from keystone.i18n import _LC
- from keystone.i18n import _LE
- from keystone.i18n import _LE as error_hint
- from keystone.i18n import _LI
- from keystone.i18n import _LW
- from oslo_log import log
- from oslo_log import log as oslo_logging
- """
-
- examples = [
- {
- 'code': """
- # stdlib logging
- LOG = logging.getLogger()
- LOG.info(_('text'))
- class C:
- def __init__(self):
- LOG.warning(oslo_i18n('text', {}))
- LOG.warning(_LW('text', {}))
- """,
- 'expected_errors': [
- (3, 9, 'K006'),
- (6, 20, 'K006'),
- ],
- },
- {
- 'code': """
- # stdlib logging w/ alias and specifying a logger
- class C:
- def __init__(self):
- self.L = logging.getLogger(__name__)
- def m(self):
- self.L.warning(
- _('text'), {}
- )
- self.L.warning(
- _LW('text'), {}
- )
- """,
- 'expected_errors': [
- (7, 12, 'K006'),
- ],
- },
- {
- 'code': """
- # oslo logging and specifying a logger
- L = log.getLogger(__name__)
- L.error(oslo_i18n('text'))
- L.error(error_hint('text'))
- """,
- 'expected_errors': [
- (3, 8, 'K006'),
- ],
- },
- {
- 'code': """
- # oslo logging w/ alias
- class C:
- def __init__(self):
- self.LOG = oslo_logging.getLogger()
- self.LOG.critical(_('text'))
- self.LOG.critical(_LC('text'))
- """,
- 'expected_errors': [
- (5, 26, 'K006'),
- ],
- },
- {
- 'code': """
- LOG = log.getLogger(__name__)
- # translation on a separate line
- msg = _('text')
- LOG.exception(msg)
- msg = _LE('text')
- LOG.exception(msg)
- """,
- 'expected_errors': [
- (4, 14, 'K006'),
- ],
- },
- {
- 'code': """
- LOG = logging.getLogger()
-
- # ensure the correct helper is being used
- LOG.warning(_LI('this should cause an error'))
-
- # debug should not allow any helpers either
- LOG.debug(_LI('this should cause an error'))
- """,
- 'expected_errors': [
- (4, 12, 'K006'),
- (7, 10, 'K005'),
- ],
- },
- {
- 'code': """
- # this should not be an error
- L = log.getLogger(__name__)
- msg = _('text')
- L.warning(msg)
- raise Exception(msg)
- """,
- 'expected_errors': [],
- },
- {
- 'code': """
- L = log.getLogger(__name__)
- def f():
- msg = _('text')
- L2.warning(msg)
- something = True # add an extra statement here
- raise Exception(msg)
- """,
- 'expected_errors': [],
- },
- {
- 'code': """
- LOG = log.getLogger(__name__)
- def func():
- msg = _('text')
- LOG.warning(msg)
- raise Exception('some other message')
- """,
- 'expected_errors': [
- (4, 16, 'K006'),
- ],
- },
- {
- 'code': """
- LOG = log.getLogger(__name__)
- if True:
- msg = _('text')
- else:
- msg = _('text')
- LOG.warning(msg)
- raise Exception(msg)
- """,
- 'expected_errors': [
- ],
- },
- {
- 'code': """
- LOG = log.getLogger(__name__)
- if True:
- msg = _('text')
- else:
- msg = _('text')
- LOG.warning(msg)
- """,
- 'expected_errors': [
- (6, 12, 'K006'),
- ],
- },
- {
- 'code': """
- LOG = log.getLogger(__name__)
- msg = _LW('text')
- LOG.warning(msg)
- raise Exception(msg)
- """,
- 'expected_errors': [
- (3, 12, 'K007'),
- ],
- },
- {
- 'code': """
- LOG = log.getLogger(__name__)
- msg = _LW('text')
- LOG.warning(msg)
- msg = _('something else')
- raise Exception(msg)
- """,
- 'expected_errors': [],
- },
- {
- 'code': """
- LOG = log.getLogger(__name__)
- msg = _LW('hello %s') % 'world'
- LOG.warning(msg)
- raise Exception(msg)
- """,
- 'expected_errors': [
- (3, 12, 'K007'),
- ],
- },
- {
- 'code': """
- LOG = log.getLogger(__name__)
- msg = _LW('hello %s') % 'world'
- LOG.warning(msg)
- """,
- 'expected_errors': [],
- },
- {
- 'code': """
- # this should not be an error
- LOG = log.getLogger(__name__)
- try:
- something = True
- except AssertionError as e:
- LOG.warning(six.text_type(e))
- raise exception.Unauthorized(e)
- """,
- 'expected_errors': [],
- },
- ]
-
- assert_not_using_deprecated_warn = {
- 'code': """
- # Logger.warn has been deprecated in Python3 in favor of
- # Logger.warning
- LOG = log.getLogger(__name__)
- LOG.warn(_LW('text'))
- """,
- 'expected_errors': [
- (4, 9, 'K009'),
- ],
- }
-
- assert_no_translations_for_debug_logging = {
- 'code': """
- # stdlib logging
- L0 = logging.getLogger()
- L0.debug(_('text'))
- class C:
- def __init__(self):
- L0.debug(oslo_i18n('text', {}))
-
- # stdlib logging w/ alias and specifying a logger
- class C:
- def __init__(self):
- self.L1 = logging.getLogger(__name__)
- def m(self):
- self.L1.debug(
- _('text'), {}
- )
-
- # oslo logging and specifying a logger
- L2 = logging.getLogger(__name__)
- L2.debug(oslo_i18n('text'))
-
- # oslo logging w/ alias
- class C:
- def __init__(self):
- self.L3 = oslo_logging.getLogger()
- self.L3.debug(_('text'))
-
- # translation on a separate line
- msg = _('text')
- L2.debug(msg)
-
- # this should not fail
- if True:
- msg = _('message %s') % X
- L2.error(msg)
- raise TypeError(msg)
- if True:
- msg = 'message'
- L2.debug(msg)
-
- # this should not fail
- if True:
- if True:
- msg = _('message')
- else:
- msg = _('message')
- L2.debug(msg)
- raise Exception(msg)
- """,
- 'expected_errors': [
- (3, 9, 'K005'),
- (6, 17, 'K005'),
- (14, 12, 'K005'),
- (19, 9, 'K005'),
- (25, 22, 'K005'),
- (29, 9, 'K005'),
- ]
- }
-
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py b/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py
deleted file mode 100644
index 7784bddc..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import fixtures
-
-from keystone.token.providers.fernet import utils
-
-
-class KeyRepository(fixtures.Fixture):
- def __init__(self, config_fixture):
- super(KeyRepository, self).__init__()
- self.config_fixture = config_fixture
-
- def setUp(self):
- super(KeyRepository, self).setUp()
- directory = self.useFixture(fixtures.TempDir()).path
- self.config_fixture.config(group='fernet_tokens',
- key_repository=directory)
-
- utils.create_key_directory()
- utils.initialize_key_repository()
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py b/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py
deleted file mode 100644
index 6cd8cc0b..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import fixtures
-
-from keystone.common import ldap as common_ldap
-from keystone.common.ldap import core as common_ldap_core
-from keystone.tests.unit import fakeldap
-
-
-class LDAPDatabase(fixtures.Fixture):
- """A fixture for setting up and tearing down an LDAP database."""
-
- def setUp(self):
- super(LDAPDatabase, self).setUp()
- self.clear()
- common_ldap_core._HANDLERS.clear()
- common_ldap.register_handler('fake://', fakeldap.FakeLdap)
- # TODO(dstanek): switch the flow here
- self.addCleanup(self.clear)
- self.addCleanup(common_ldap_core._HANDLERS.clear)
-
- def clear(self):
- for shelf in fakeldap.FakeShelves:
- fakeldap.FakeShelves[shelf].clear()
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/policy.py b/keystone-moon/keystone/tests/unit/ksfixtures/policy.py
deleted file mode 100644
index b883f980..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/policy.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import fixtures
-from oslo_policy import opts
-
-from keystone.policy.backends import rules
-
-
-class Policy(fixtures.Fixture):
- """A fixture for working with policy configuration."""
-
- def __init__(self, policy_file, config_fixture):
- self._policy_file = policy_file
- self._config_fixture = config_fixture
-
- def setUp(self):
- super(Policy, self).setUp()
- opts.set_defaults(self._config_fixture.conf)
- self._config_fixture.config(group='oslo_policy',
- policy_file=self._policy_file)
- rules.init()
- self.addCleanup(rules.reset)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py b/keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py
deleted file mode 100644
index a4be06f8..00000000
--- a/keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import tempfile
-
-import fixtures
-
-
-class SecureTempFile(fixtures.Fixture):
- """A fixture for creating a secure temp file."""
-
- def setUp(self):
- super(SecureTempFile, self).setUp()
-
- _fd, self.file_name = tempfile.mkstemp()
- # Make sure no file descriptors are leaked, close the unused FD.
- os.close(_fd)
- self.addCleanup(os.remove, self.file_name)
diff --git a/keystone-moon/keystone/tests/unit/mapping_fixtures.py b/keystone-moon/keystone/tests/unit/mapping_fixtures.py
deleted file mode 100644
index 9dc980aa..00000000
--- a/keystone-moon/keystone/tests/unit/mapping_fixtures.py
+++ /dev/null
@@ -1,1486 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Fixtures for Federation Mapping."""
-
-from six.moves import range, zip
-
-
-EMPLOYEE_GROUP_ID = "0cd5e9"
-CONTRACTOR_GROUP_ID = "85a868"
-TESTER_GROUP_ID = "123"
-TESTER_GROUP_NAME = "tester"
-DEVELOPER_GROUP_ID = "xyz"
-DEVELOPER_GROUP_NAME = "Developer"
-CONTRACTOR_GROUP_NAME = "Contractor"
-DEVELOPER_GROUP_DOMAIN_NAME = "outsourcing"
-DEVELOPER_GROUP_DOMAIN_ID = "5abc43"
-FEDERATED_DOMAIN = "Federated"
-LOCAL_DOMAIN = "Local"
-
-# Mapping summary:
-# LastName Smith & Not Contractor or SubContractor -> group 0cd5e9
-# FirstName Jill & Contractor or SubContractor -> to group 85a868
-MAPPING_SMALL = {
- "rules": [
- {
- "local": [
- {
- "group": {
- "id": EMPLOYEE_GROUP_ID
- }
- },
- {
- "user": {
- "name": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "orgPersonType",
- "not_any_of": [
- "Contractor",
- "SubContractor"
- ]
- },
- {
- "type": "LastName",
- "any_one_of": [
- "Bo"
- ]
- }
- ]
- },
- {
- "local": [
- {
- "group": {
- "id": CONTRACTOR_GROUP_ID
- }
- },
- {
- "user": {
- "name": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "orgPersonType",
- "any_one_of": [
- "Contractor",
- "SubContractor"
- ]
- },
- {
- "type": "FirstName",
- "any_one_of": [
- "Jill"
- ]
- }
- ]
- }
- ]
-}
-
-# Mapping summary:
-# orgPersonType Admin or Big Cheese -> name {0} {1} email {2} and group 0cd5e9
-# orgPersonType Customer -> user name {0} email {1}
-# orgPersonType Test and email ^@example.com$ -> group 123 and xyz
-MAPPING_LARGE = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0} {1}",
- "email": "{2}"
- },
- "group": {
- "id": EMPLOYEE_GROUP_ID
- }
- }
- ],
- "remote": [
- {
- "type": "FirstName"
- },
- {
- "type": "LastName"
- },
- {
- "type": "Email"
- },
- {
- "type": "orgPersonType",
- "any_one_of": [
- "Admin",
- "Big Cheese"
- ]
- }
- ]
- },
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "email": "{1}"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "Email"
- },
- {
- "type": "orgPersonType",
- "not_any_of": [
- "Admin",
- "Employee",
- "Contractor",
- "Tester"
- ]
- }
- ]
- },
- {
- "local": [
- {
- "group": {
- "id": TESTER_GROUP_ID
- }
- },
- {
- "group": {
- "id": DEVELOPER_GROUP_ID
- }
- },
- {
- "user": {
- "name": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "orgPersonType",
- "any_one_of": [
- "Tester"
- ]
- },
- {
- "type": "Email",
- "any_one_of": [
- ".*@example.com$"
- ],
- "regex": True
- }
- ]
- }
- ]
-}
-
-MAPPING_BAD_REQ = {
- "rules": [
- {
- "local": [
- {
- "user": "name"
- }
- ],
- "remote": [
- {
- "type": "UserName",
- "bad_requirement": [
- "Young"
- ]
- }
- ]
- }
- ]
-}
-
-MAPPING_BAD_VALUE = {
- "rules": [
- {
- "local": [
- {
- "user": "name"
- }
- ],
- "remote": [
- {
- "type": "UserName",
- "any_one_of": "should_be_list"
- }
- ]
- }
- ]
-}
-
-MAPPING_NO_RULES = {
- 'rules': []
-}
-
-MAPPING_NO_REMOTE = {
- "rules": [
- {
- "local": [
- {
- "user": "name"
- }
- ],
- "remote": []
- }
- ]
-}
-
-MAPPING_MISSING_LOCAL = {
- "rules": [
- {
- "remote": [
- {
- "type": "UserName",
- "any_one_of": "should_be_list"
- }
- ]
- }
- ]
-}
-
-MAPPING_WRONG_TYPE = {
- "rules": [
- {
- "local": [
- {
- "user": "{1}"
- }
- ],
- "remote": [
- {
- "not_type": "UserName"
- }
- ]
- }
- ]
-}
-
-MAPPING_MISSING_TYPE = {
- "rules": [
- {
- "local": [
- {
- "user": "{1}"
- }
- ],
- "remote": [
- {}
- ]
- }
- ]
-}
-
-MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF = {
- "rules": [
- {
- "local": [
- {
- "group": {
- "id": "0cd5e9"
- }
- },
- {
- "user": {
- "name": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "orgPersonType",
- "not_any_of": [
- "SubContractor"
- ],
- "invalid_type": "xyz"
- }
- ]
- }
- ]
-}
-
-MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF = {
- "rules": [
- {
- "local": [
- {
- "group": {
- "id": "0cd5e9"
- }
- },
- {
- "user": {
- "name": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "orgPersonType",
- "any_one_of": [
- "SubContractor"
- ],
- "invalid_type": "xyz"
- }
- ]
- }
- ]
-}
-
-MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE = {
- "rules": [
- {
- "local": [
- {
- "group": {
- "id": "0cd5e9"
- }
- },
- {
- "user": {
- "name": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "orgPersonType",
- "invalid_type": "xyz"
- }
- ]
- }
- ]
-}
-
-MAPPING_EXTRA_RULES_PROPS = {
- "rules": [
- {
- "local": [
- {
- "group": {
- "id": "0cd5e9"
- }
- },
- {
- "user": {
- "name": "{0}"
- }
- }
- ],
- "invalid_type": {
- "id": "xyz",
- },
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "orgPersonType",
- "not_any_of": [
- "SubContractor"
- ]
- }
- ]
- }
- ]
-}
-
-MAPPING_TESTER_REGEX = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- }
- ]
- },
- {
- "local": [
- {
- "group": {
- "id": TESTER_GROUP_ID
- }
- }
- ],
- "remote": [
- {
- "type": "orgPersonType",
- "any_one_of": [
- ".*Tester*"
- ],
- "regex": True
- }
- ]
- }
- ]
-}
-
-
-MAPPING_DIRECT_MAPPING_THROUGH_KEYWORD = {
- "rules": [
- {
- "local": [
- {
- "user": "{0}"
- },
- {
- "group": TESTER_GROUP_ID
- }
- ],
- "remote": [
- {
- "type": "UserName",
- "any_one_of": [
- "bwilliams"
- ]
- }
- ]
- }
- ]
-}
-
-MAPPING_DEVELOPER_REGEX = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- },
- "group": {
- "id": DEVELOPER_GROUP_ID
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "orgPersonType",
- "any_one_of": [
- "Developer"
- ],
- },
- {
- "type": "Email",
- "not_any_of": [
- ".*@example.org$"
- ],
- "regex": True
- }
- ]
- }
- ]
-}
-
-MAPPING_GROUP_NAMES = {
-
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- }
- ]
- },
- {
- "local": [
- {
- "group": {
- "name": DEVELOPER_GROUP_NAME,
- "domain": {
- "name": DEVELOPER_GROUP_DOMAIN_NAME
- }
- }
- }
- ],
- "remote": [
- {
- "type": "orgPersonType",
- "any_one_of": [
- "Employee"
- ],
- }
- ]
- },
- {
- "local": [
- {
- "group": {
- "name": TESTER_GROUP_NAME,
- "domain": {
- "id": DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- ],
- "remote": [
- {
- "type": "orgPersonType",
- "any_one_of": [
- "BuildingX"
- ]
- }
- ]
- },
- ]
-}
-
-MAPPING_EPHEMERAL_USER = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "domain": {
- "id": FEDERATED_DOMAIN
- },
- "type": "ephemeral"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "UserName",
- "any_one_of": [
- "tbo"
- ]
- }
- ]
- }
- ]
-}
-
-MAPPING_GROUPS_WHITELIST = {
- "rules": [
- {
- "remote": [
- {
- "type": "orgPersonType",
- "whitelist": [
- "Developer", "Contractor"
- ]
- },
- {
- "type": "UserName"
- }
- ],
- "local": [
- {
- "groups": "{0}",
- "domain": {
- "id": DEVELOPER_GROUP_DOMAIN_ID
- }
- },
- {
- "user": {
- "name": "{1}"
- }
- }
- ]
- }
- ]
-}
-
-MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "domain": {
- "id": LOCAL_DOMAIN
- },
- "type": "ephemeral"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "UserName",
- "any_one_of": [
- "jsmith"
- ]
- }
- ]
- }
- ]
-}
-
-MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN = {
- "rules": [
- {
- "remote": [
- {
- "type": "orgPersonType",
- "whitelist": [
- "Developer", "Contractor"
- ]
- },
- ],
- "local": [
- {
- "groups": "{0}",
- }
- ]
- }
- ]
-}
-
-MAPPING_LOCAL_USER_LOCAL_DOMAIN = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "domain": {
- "id": LOCAL_DOMAIN
- },
- "type": "local"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "UserName",
- "any_one_of": [
- "jsmith"
- ]
- }
- ]
- }
- ]
-}
-
-MAPPING_GROUPS_BLACKLIST_MULTIPLES = {
- "rules": [
- {
- "remote": [
- {
- "type": "orgPersonType",
- "blacklist": [
- "Developer", "Manager"
- ]
- },
- {
- "type": "Thing" # this could be variable length!
- },
- {
- "type": "UserName"
- },
- ],
- "local": [
- {
- "groups": "{0}",
- "domain": {
- "id": DEVELOPER_GROUP_DOMAIN_ID
- }
- },
- {
- "user": {
- "name": "{2}",
- }
- }
- ]
- }
- ]
-}
-MAPPING_GROUPS_BLACKLIST = {
- "rules": [
- {
- "remote": [
- {
- "type": "orgPersonType",
- "blacklist": [
- "Developer", "Manager"
- ]
- },
- {
- "type": "UserName"
- }
- ],
- "local": [
- {
- "groups": "{0}",
- "domain": {
- "id": DEVELOPER_GROUP_DOMAIN_ID
- }
- },
- {
- "user": {
- "name": "{1}"
- }
- }
- ]
- }
- ]
-}
-
-# Exercise all possibilities of user identification. Values are hardcoded on
-# purpose.
-MAPPING_USER_IDS = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "UserName",
- "any_one_of": [
- "jsmith"
- ]
- }
- ]
- },
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "id": "abc123@example.com",
- "domain": {
- "id": "federated"
- }
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "UserName",
- "any_one_of": [
- "tbo"
- ]
- }
- ]
- },
- {
- "local": [
- {
- "user": {
- "id": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "UserName",
- "any_one_of": [
- "bob"
- ]
- }
- ]
- },
- {
- "local": [
- {
- "user": {
- "id": "abc123@example.com",
- "name": "{0}",
- "domain": {
- "id": "federated"
- }
- }
- }
- ],
- "remote": [
- {
- "type": "UserName"
- },
- {
- "type": "UserName",
- "any_one_of": [
- "bwilliams"
- ]
- }
- ]
- }
- ]
-}
-
-MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN = {
- "rules": [
- {
- "remote": [
- {
- "type": "orgPersonType",
- "blacklist": [
- "Developer", "Manager"
- ]
- },
- ],
- "local": [
- {
- "groups": "{0}",
- },
- ]
- }
- ]
-}
-
-MAPPING_GROUPS_WHITELIST_AND_BLACKLIST = {
- "rules": [
- {
- "remote": [
- {
- "type": "orgPersonType",
- "blacklist": [
- "Employee"
- ],
- "whitelist": [
- "Contractor"
- ]
- },
- ],
- "local": [
- {
- "groups": "{0}",
- "domain": {
- "id": DEVELOPER_GROUP_DOMAIN_ID
- }
- },
- ]
- }
- ]
-}
-
-# Mapping used by tokenless test cases, it maps the user_name
-# and domain_name.
-MAPPING_WITH_USERNAME_AND_DOMAINNAME = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'name': '{0}',
- 'domain': {
- 'name': '{1}'
- },
- 'type': 'local'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_USER_NAME'
- },
- {
- 'type': 'SSL_CLIENT_DOMAIN_NAME'
- }
- ]
- }
- ]
-}
-
-# Mapping used by tokenless test cases, it maps the user_id
-# and domain_name.
-MAPPING_WITH_USERID_AND_DOMAINNAME = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'id': '{0}',
- 'domain': {
- 'name': '{1}'
- },
- 'type': 'local'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_USER_ID'
- },
- {
- 'type': 'SSL_CLIENT_DOMAIN_NAME'
- }
- ]
- }
- ]
-}
-
-# Mapping used by tokenless test cases, it maps the user_name
-# and domain_id.
-MAPPING_WITH_USERNAME_AND_DOMAINID = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'name': '{0}',
- 'domain': {
- 'id': '{1}'
- },
- 'type': 'local'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_USER_NAME'
- },
- {
- 'type': 'SSL_CLIENT_DOMAIN_ID'
- }
- ]
- }
- ]
-}
-
-# Mapping used by tokenless test cases, it maps the user_id
-# and domain_id.
-MAPPING_WITH_USERID_AND_DOMAINID = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'id': '{0}',
- 'domain': {
- 'id': '{1}'
- },
- 'type': 'local'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_USER_ID'
- },
- {
- 'type': 'SSL_CLIENT_DOMAIN_ID'
- }
- ]
- }
- ]
-}
-
-# Mapping used by tokenless test cases, it maps the domain_id only.
-MAPPING_WITH_DOMAINID_ONLY = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'domain': {
- 'id': '{0}'
- },
- 'type': 'local'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_DOMAIN_ID'
- }
- ]
- }
- ]
-}
-
-MAPPING_GROUPS_IDS_WHITELIST = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}"
- }
- },
- {
- "group_ids": "{1}"
- },
- {
- "group": {
- "id": "{2}"
- }
- }
- ],
- "remote": [
- {
- "type": "name"
- },
- {
- "type": "group_ids",
- "whitelist": [
- "abc123", "ghi789", "321cba"
- ]
- },
- {
- "type": "group"
- }
- ]
- }
- ]
-}
-
-MAPPING_GROUPS_IDS_BLACKLIST = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}"
- }
- },
- {
- "group_ids": "{1}"
- },
- {
- "group": {
- "id": "{2}"
- }
- }
- ],
- "remote": [
- {
- "type": "name"
- },
- {
- "type": "group_ids",
- "blacklist": [
- "def456"
- ]
- },
- {
- "type": "group"
- }
- ]
- }
- ]
-}
-
-# Mapping used by tokenless test cases, it maps the domain_name only.
-MAPPING_WITH_DOMAINNAME_ONLY = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'domain': {
- 'name': '{0}'
- },
- 'type': 'local'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_DOMAIN_NAME'
- }
- ]
- }
- ]
-}
-
-# Mapping used by tokenless test cases, it maps the user_name only.
-MAPPING_WITH_USERNAME_ONLY = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'name': '{0}',
- 'type': 'local'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_USER_NAME'
- }
- ]
- }
- ]
-}
-
-# Mapping used by tokenless test cases, it maps the user_id only.
-MAPPING_WITH_USERID_ONLY = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'id': '{0}',
- 'type': 'local'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_USER_ID'
- }
- ]
- }
- ]
-}
-
-MAPPING_FOR_EPHEMERAL_USER = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'name': '{0}',
- 'type': 'ephemeral'
- },
- 'group': {
- 'id': 'dummy'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_USER_NAME'
- }
- ]
- }
- ]
-}
-
-MAPPING_FOR_DEFAULT_EPHEMERAL_USER = {
- 'rules': [
- {
- 'local': [
- {
- 'user': {
- 'name': '{0}'
- },
- 'group': {
- 'id': 'dummy'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'SSL_CLIENT_USER_NAME'
- }
- ]
- }
- ]
-}
-
-MAPPING_GROUPS_WHITELIST_PASS_THROUGH = {
- "rules": [
- {
- "remote": [
- {
- "type": "UserName"
- }
- ],
- "local": [
- {
- "user": {
- "name": "{0}",
- "domain": {
- "id": DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- }
- ]
- },
- {
- "remote": [
- {
- "type": "orgPersonType",
- "whitelist": ['Developer']
- }
- ],
- "local": [
- {
- "groups": "{0}",
- "domain": {
- "id": DEVELOPER_GROUP_DOMAIN_ID
- }
- }
- ]
- }
- ]
-}
-
-MAPPING_BAD_LOCAL_SETUP = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "domain": {"id": "default"}
- },
- "whatisthis": "local"
- }
- ],
- "remote": [
- {
- "type": "UserName"
- }
- ]
- }
- ]
-}
-
-EMPLOYEE_ASSERTION = {
- 'Email': 'tim@example.com',
- 'UserName': 'tbo',
- 'FirstName': 'Tim',
- 'LastName': 'Bo',
- 'orgPersonType': 'Employee;BuildingX'
-}
-
-EMPLOYEE_ASSERTION_MULTIPLE_GROUPS = {
- 'Email': 'tim@example.com',
- 'UserName': 'tbo',
- 'FirstName': 'Tim',
- 'LastName': 'Bo',
- 'orgPersonType': 'Developer;Manager;Contractor',
- 'Thing': 'yes!;maybe!;no!!'
-}
-
-EMPLOYEE_ASSERTION_PREFIXED = {
- 'PREFIX_Email': 'tim@example.com',
- 'PREFIX_UserName': 'tbo',
- 'PREFIX_FirstName': 'Tim',
- 'PREFIX_LastName': 'Bo',
- 'PREFIX_orgPersonType': 'SuperEmployee;BuildingX'
-}
-
-CONTRACTOR_ASSERTION = {
- 'Email': 'jill@example.com',
- 'UserName': 'jsmith',
- 'FirstName': 'Jill',
- 'LastName': 'Smith',
- 'orgPersonType': 'Contractor;Non-Dev'
-}
-
-ADMIN_ASSERTION = {
- 'Email': 'bob@example.com',
- 'UserName': 'bob',
- 'FirstName': 'Bob',
- 'LastName': 'Thompson',
- 'orgPersonType': 'Admin;Chief'
-}
-
-CUSTOMER_ASSERTION = {
- 'Email': 'beth@example.com',
- 'UserName': 'bwilliams',
- 'FirstName': 'Beth',
- 'LastName': 'Williams',
- 'orgPersonType': 'Customer'
-}
-
-ANOTHER_CUSTOMER_ASSERTION = {
- 'Email': 'mark@example.com',
- 'UserName': 'markcol',
- 'FirstName': 'Mark',
- 'LastName': 'Collins',
- 'orgPersonType': 'Managers;CEO;CTO'
-}
-
-TESTER_ASSERTION = {
- 'Email': 'testacct@example.com',
- 'UserName': 'testacct',
- 'FirstName': 'Test',
- 'LastName': 'Account',
- 'orgPersonType': 'MadeupGroup;Tester;GroupX'
-}
-
-ANOTHER_TESTER_ASSERTION = {
- 'Email': 'testacct@example.com',
- 'UserName': 'IamTester'
-}
-
-BAD_TESTER_ASSERTION = {
- 'Email': 'eviltester@example.org',
- 'UserName': 'Evil',
- 'FirstName': 'Test',
- 'LastName': 'Account',
- 'orgPersonType': 'Tester'
-}
-
-BAD_DEVELOPER_ASSERTION = {
- 'Email': 'evildeveloper@example.org',
- 'UserName': 'Evil',
- 'FirstName': 'Develop',
- 'LastName': 'Account',
- 'orgPersonType': 'Developer'
-}
-
-MALFORMED_TESTER_ASSERTION = {
- 'Email': 'testacct@example.com',
- 'UserName': 'testacct',
- 'FirstName': 'Test',
- 'LastName': 'Account',
- 'orgPersonType': 'Tester',
- 'object': object(),
- 'dictionary': dict(zip('teststring', range(10))),
- 'tuple': tuple(range(5))
-}
-
-DEVELOPER_ASSERTION = {
- 'Email': 'developacct@example.com',
- 'UserName': 'developacct',
- 'FirstName': 'Develop',
- 'LastName': 'Account',
- 'orgPersonType': 'Developer'
-}
-
-CONTRACTOR_MALFORMED_ASSERTION = {
- 'UserName': 'user',
- 'FirstName': object(),
- 'orgPersonType': 'Contractor'
-}
-
-LOCAL_USER_ASSERTION = {
- 'UserName': 'marek',
- 'UserType': 'random'
-}
-
-ANOTHER_LOCAL_USER_ASSERTION = {
- 'UserName': 'marek',
- 'Position': 'DirectorGeneral'
-}
-
-UNMATCHED_GROUP_ASSERTION = {
- 'REMOTE_USER': 'Any Momoose',
- 'REMOTE_USER_GROUPS': 'EXISTS;NO_EXISTS'
-}
-
-GROUP_IDS_ASSERTION = {
- 'name': 'opilotte',
- 'group_ids': 'abc123;def456;ghi789',
- 'group': 'klm012'
-}
-
-GROUP_IDS_ASSERTION_ONLY_ONE_GROUP = {
- 'name': 'opilotte',
- 'group_ids': '321cba',
- 'group': '210mlk'
-}
-
-UNICODE_NAME_ASSERTION = {
- 'PFX_Email': 'jon@example.com',
- 'PFX_UserName': 'jonkare',
- 'PFX_FirstName': 'Jon KÃ¥re',
- 'PFX_LastName': 'Hellån',
- 'PFX_orgPersonType': 'Admin;Chief'
-}
-
-MAPPING_UNICODE = {
- "rules": [
- {
- "local": [
- {
- "user": {
- "name": "{0} {1}",
- "email": "{2}"
- },
- "group": {
- "id": EMPLOYEE_GROUP_ID
- }
- }
- ],
- "remote": [
- {
- "type": "PFX_FirstName"
- },
- {
- "type": "PFX_LastName"
- },
- {
- "type": "PFX_Email"
- },
- {
- "type": "PFX_orgPersonType",
- "any_one_of": [
- "Admin",
- "Big Cheese"
- ]
- }
- ]
- },
- ],
-}
diff --git a/keystone-moon/keystone/tests/unit/policy/__init__.py b/keystone-moon/keystone/tests/unit/policy/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/policy/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/policy/test_backends.py b/keystone-moon/keystone/tests/unit/policy/test_backends.py
deleted file mode 100644
index 7b672420..00000000
--- a/keystone-moon/keystone/tests/unit/policy/test_backends.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from keystone import exception
-from keystone.tests import unit
-
-
-class PolicyTests(object):
- def test_create(self):
- ref = unit.new_policy_ref()
- res = self.policy_api.create_policy(ref['id'], ref)
- self.assertDictEqual(ref, res)
-
- def test_get(self):
- ref = unit.new_policy_ref()
- res = self.policy_api.create_policy(ref['id'], ref)
-
- res = self.policy_api.get_policy(ref['id'])
- self.assertDictEqual(ref, res)
-
- def test_list(self):
- ref = unit.new_policy_ref()
- self.policy_api.create_policy(ref['id'], ref)
-
- res = self.policy_api.list_policies()
- res = [x for x in res if x['id'] == ref['id']][0]
- self.assertDictEqual(ref, res)
-
- def test_update(self):
- ref = unit.new_policy_ref()
- self.policy_api.create_policy(ref['id'], ref)
- orig = ref
-
- ref = unit.new_policy_ref()
-
- # (cannot change policy ID)
- self.assertRaises(exception.ValidationError,
- self.policy_api.update_policy,
- orig['id'],
- ref)
-
- ref['id'] = orig['id']
- res = self.policy_api.update_policy(orig['id'], ref)
- self.assertDictEqual(ref, res)
-
- def test_delete(self):
- ref = unit.new_policy_ref()
- self.policy_api.create_policy(ref['id'], ref)
-
- self.policy_api.delete_policy(ref['id'])
- self.assertRaises(exception.PolicyNotFound,
- self.policy_api.delete_policy,
- ref['id'])
- self.assertRaises(exception.PolicyNotFound,
- self.policy_api.get_policy,
- ref['id'])
- res = self.policy_api.list_policies()
- self.assertFalse(len([x for x in res if x['id'] == ref['id']]))
-
- def test_get_policy_returns_not_found(self):
- self.assertRaises(exception.PolicyNotFound,
- self.policy_api.get_policy,
- uuid.uuid4().hex)
-
- def test_update_policy_returns_not_found(self):
- ref = unit.new_policy_ref()
- self.assertRaises(exception.PolicyNotFound,
- self.policy_api.update_policy,
- ref['id'],
- ref)
-
- def test_delete_policy_returns_not_found(self):
- self.assertRaises(exception.PolicyNotFound,
- self.policy_api.delete_policy,
- uuid.uuid4().hex)
diff --git a/keystone-moon/keystone/tests/unit/resource/__init__.py b/keystone-moon/keystone/tests/unit/resource/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/resource/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/resource/backends/__init__.py b/keystone-moon/keystone/tests/unit/resource/backends/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/resource/backends/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/resource/backends/test_sql.py b/keystone-moon/keystone/tests/unit/resource/backends/test_sql.py
deleted file mode 100644
index 79ad3df2..00000000
--- a/keystone-moon/keystone/tests/unit/resource/backends/test_sql.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.resource.backends import sql
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit.resource import test_backends
-
-
-class TestSqlResourceDriver(unit.BaseTestCase,
- test_backends.ResourceDriverTests):
- def setUp(self):
- super(TestSqlResourceDriver, self).setUp()
- self.useFixture(database.Database())
- self.driver = sql.Resource()
diff --git a/keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py b/keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py b/keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py
deleted file mode 100644
index b4c5f262..00000000
--- a/keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from keystone.common import sql
-from keystone.resource.config_backends import sql as config_sql
-from keystone.tests import unit
-from keystone.tests.unit.backend import core_sql
-from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit.resource import test_core
-
-
-class SqlDomainConfigModels(core_sql.BaseBackendSqlModels):
-
- def test_whitelisted_model(self):
- cols = (('domain_id', sql.String, 64),
- ('group', sql.String, 255),
- ('option', sql.String, 255),
- ('value', sql.JsonBlob, None))
- self.assertExpectedSchema('whitelisted_config', cols)
-
- def test_sensitive_model(self):
- cols = (('domain_id', sql.String, 64),
- ('group', sql.String, 255),
- ('option', sql.String, 255),
- ('value', sql.JsonBlob, None))
- self.assertExpectedSchema('sensitive_config', cols)
-
-
-class SqlDomainConfigDriver(unit.BaseTestCase,
- test_core.DomainConfigDriverTests):
- def setUp(self):
- super(SqlDomainConfigDriver, self).setUp()
- self.useFixture(database.Database())
- self.driver = config_sql.DomainConfig()
-
-
-class SqlDomainConfig(core_sql.BaseBackendSqlTests,
- test_core.DomainConfigTests):
- def setUp(self):
- super(SqlDomainConfig, self).setUp()
- # test_core.DomainConfigTests is effectively a mixin class, so make
- # sure we call its setup
- test_core.DomainConfigTests.setUp(self)
diff --git a/keystone-moon/keystone/tests/unit/resource/test_backends.py b/keystone-moon/keystone/tests/unit/resource/test_backends.py
deleted file mode 100644
index eed4c6ba..00000000
--- a/keystone-moon/keystone/tests/unit/resource/test_backends.py
+++ /dev/null
@@ -1,1669 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-import mock
-from oslo_config import cfg
-from six.moves import range
-from testtools import matchers
-
-from keystone.common import driver_hints
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit import utils as test_utils
-
-
-CONF = cfg.CONF
-
-
-class ResourceTests(object):
-
- domain_count = len(default_fixtures.DOMAINS)
-
- def test_get_project(self):
- tenant_ref = self.resource_api.get_project(self.tenant_bar['id'])
- self.assertDictEqual(self.tenant_bar, tenant_ref)
-
- def test_get_project_returns_not_found(self):
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- uuid.uuid4().hex)
-
- def test_get_project_by_name(self):
- tenant_ref = self.resource_api.get_project_by_name(
- self.tenant_bar['name'],
- CONF.identity.default_domain_id)
- self.assertDictEqual(self.tenant_bar, tenant_ref)
-
- @unit.skip_if_no_multiple_domains_support
- def test_get_project_by_name_for_project_acting_as_a_domain(self):
- """Tests get_project_by_name works when the domain_id is None."""
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id, is_domain=False)
- project = self.resource_api.create_project(project['id'], project)
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project_by_name,
- project['name'],
- None)
-
- # Test that querying with domain_id as None will find the project
- # acting as a domain, even if it's name is the same as the regular
- # project above.
- project2 = unit.new_project_ref(is_domain=True,
- name=project['name'])
- project2 = self.resource_api.create_project(project2['id'], project2)
-
- project_ref = self.resource_api.get_project_by_name(
- project2['name'], None)
-
- self.assertEqual(project2, project_ref)
-
- def test_get_project_by_name_returns_not_found(self):
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project_by_name,
- uuid.uuid4().hex,
- CONF.identity.default_domain_id)
-
- def test_create_duplicate_project_id_fails(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project_id = project['id']
- self.resource_api.create_project(project_id, project)
- project['name'] = 'fake2'
- self.assertRaises(exception.Conflict,
- self.resource_api.create_project,
- project_id,
- project)
-
- def test_create_duplicate_project_name_fails(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project_id = project['id']
- self.resource_api.create_project(project_id, project)
- project['id'] = 'fake2'
- self.assertRaises(exception.Conflict,
- self.resource_api.create_project,
- project['id'],
- project)
-
- def test_create_duplicate_project_name_in_different_domains(self):
- new_domain = unit.new_domain_ref()
- self.resource_api.create_domain(new_domain['id'], new_domain)
- project1 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project2 = unit.new_project_ref(name=project1['name'],
- domain_id=new_domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- self.resource_api.create_project(project2['id'], project2)
-
- def test_move_project_between_domains(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- project = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project['id'], project)
- project['domain_id'] = domain2['id']
- # Update the project asserting that a deprecation warning is emitted
- with mock.patch(
- 'oslo_log.versionutils.report_deprecated_feature') as mock_dep:
- self.resource_api.update_project(project['id'], project)
- self.assertTrue(mock_dep.called)
-
- updated_project_ref = self.resource_api.get_project(project['id'])
- self.assertEqual(domain2['id'], updated_project_ref['domain_id'])
-
- def test_move_project_between_domains_with_clashing_names_fails(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- # First, create a project in domain1
- project1 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project1['id'], project1)
- # Now create a project in domain2 with a potentially clashing
- # name - which should work since we have domain separation
- project2 = unit.new_project_ref(name=project1['name'],
- domain_id=domain2['id'])
- self.resource_api.create_project(project2['id'], project2)
- # Now try and move project1 into the 2nd domain - which should
- # fail since the names clash
- project1['domain_id'] = domain2['id']
- self.assertRaises(exception.Conflict,
- self.resource_api.update_project,
- project1['id'],
- project1)
-
- @unit.skip_if_no_multiple_domains_support
- def test_move_project_with_children_between_domains_fails(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- project = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project['id'], project)
- child_project = unit.new_project_ref(domain_id=domain1['id'],
- parent_id=project['id'])
- self.resource_api.create_project(child_project['id'], child_project)
- project['domain_id'] = domain2['id']
-
- # Update is not allowed, since updating the whole subtree would be
- # necessary
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- project['id'],
- project)
-
- @unit.skip_if_no_multiple_domains_support
- def test_move_project_not_root_between_domains_fails(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- project = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project['id'], project)
- child_project = unit.new_project_ref(domain_id=domain1['id'],
- parent_id=project['id'])
- self.resource_api.create_project(child_project['id'], child_project)
- child_project['domain_id'] = domain2['id']
-
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- child_project['id'],
- child_project)
-
- @unit.skip_if_no_multiple_domains_support
- def test_move_root_project_between_domains_succeeds(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
- root_project = unit.new_project_ref(domain_id=domain1['id'])
- root_project = self.resource_api.create_project(root_project['id'],
- root_project)
-
- root_project['domain_id'] = domain2['id']
- self.resource_api.update_project(root_project['id'], root_project)
- project_from_db = self.resource_api.get_project(root_project['id'])
-
- self.assertEqual(domain2['id'], project_from_db['domain_id'])
-
- @unit.skip_if_no_multiple_domains_support
- def test_update_domain_id_project_is_domain_fails(self):
- other_domain = unit.new_domain_ref()
- self.resource_api.create_domain(other_domain['id'], other_domain)
- project = unit.new_project_ref(is_domain=True)
- self.resource_api.create_project(project['id'], project)
- project['domain_id'] = other_domain['id']
-
- # Update of domain_id of projects acting as domains is not allowed
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- project['id'],
- project)
-
- def test_rename_duplicate_project_name_fails(self):
- project1 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project2 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project1['id'], project1)
- self.resource_api.create_project(project2['id'], project2)
- project2['name'] = project1['name']
- self.assertRaises(exception.Error,
- self.resource_api.update_project,
- project2['id'],
- project2)
-
- def test_update_project_id_does_nothing(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project_id = project['id']
- self.resource_api.create_project(project['id'], project)
- project['id'] = 'fake2'
- self.resource_api.update_project(project_id, project)
- project_ref = self.resource_api.get_project(project_id)
- self.assertEqual(project_id, project_ref['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- 'fake2')
-
- def test_delete_domain_with_user_group_project_links(self):
- # TODO(chungg):add test case once expected behaviour defined
- pass
-
- def test_update_project_returns_not_found(self):
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.update_project,
- uuid.uuid4().hex,
- dict())
-
- def test_delete_project_returns_not_found(self):
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.delete_project,
- uuid.uuid4().hex)
-
- def test_create_update_delete_unicode_project(self):
- unicode_project_name = u'name \u540d\u5b57'
- project = unit.new_project_ref(
- name=unicode_project_name,
- domain_id=CONF.identity.default_domain_id)
- project = self.resource_api.create_project(project['id'], project)
- self.resource_api.update_project(project['id'], project)
- self.resource_api.delete_project(project['id'])
-
- def test_create_project_with_no_enabled_field(self):
- ref = unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
- del ref['enabled']
- self.resource_api.create_project(ref['id'], ref)
-
- project = self.resource_api.get_project(ref['id'])
- self.assertIs(project['enabled'], True)
-
- def test_create_project_long_name_fails(self):
- project = unit.new_project_ref(
- name='a' * 65, domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'],
- project)
-
- def test_create_project_blank_name_fails(self):
- project = unit.new_project_ref(
- name='', domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'],
- project)
-
- def test_create_project_invalid_name_fails(self):
- project = unit.new_project_ref(
- name=None, domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'],
- project)
- project = unit.new_project_ref(
- name=123, domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'],
- project)
-
- def test_update_project_blank_name_fails(self):
- project = unit.new_project_ref(
- name='fake1', domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project['id'], project)
- project['name'] = ''
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- project['id'],
- project)
-
- def test_update_project_long_name_fails(self):
- project = unit.new_project_ref(
- name='fake1', domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project['id'], project)
- project['name'] = 'a' * 65
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- project['id'],
- project)
-
- def test_update_project_invalid_name_fails(self):
- project = unit.new_project_ref(
- name='fake1', domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project['id'], project)
- project['name'] = None
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- project['id'],
- project)
-
- project['name'] = 123
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- project['id'],
- project)
-
- def test_update_project_invalid_enabled_type_string(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertTrue(project_ref['enabled'])
-
- # Strings are not valid boolean values
- project['enabled'] = "false"
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- project['id'],
- project)
-
- def test_create_project_invalid_enabled_type_string(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id,
- # invalid string value
- enabled="true")
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'],
- project)
-
- def test_create_project_invalid_domain_id(self):
- project = unit.new_project_ref(domain_id=uuid.uuid4().hex)
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.create_project,
- project['id'],
- project)
-
- def test_list_domains(self):
- domain1 = unit.new_domain_ref()
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- self.resource_api.create_domain(domain2['id'], domain2)
- domains = self.resource_api.list_domains()
- self.assertEqual(3, len(domains))
- domain_ids = []
- for domain in domains:
- domain_ids.append(domain.get('id'))
- self.assertIn(CONF.identity.default_domain_id, domain_ids)
- self.assertIn(domain1['id'], domain_ids)
- self.assertIn(domain2['id'], domain_ids)
-
- def test_list_projects(self):
- project_refs = self.resource_api.list_projects()
- project_count = len(default_fixtures.TENANTS) + self.domain_count
- self.assertEqual(project_count, len(project_refs))
- for project in default_fixtures.TENANTS:
- self.assertIn(project, project_refs)
-
- def test_list_projects_with_multiple_filters(self):
- # Create a project
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project = self.resource_api.create_project(project['id'], project)
-
- # Build driver hints with the project's name and inexistent description
- hints = driver_hints.Hints()
- hints.add_filter('name', project['name'])
- hints.add_filter('description', uuid.uuid4().hex)
-
- # Retrieve projects based on hints and check an empty list is returned
- projects = self.resource_api.list_projects(hints)
- self.assertEqual([], projects)
-
- # Build correct driver hints
- hints = driver_hints.Hints()
- hints.add_filter('name', project['name'])
- hints.add_filter('description', project['description'])
-
- # Retrieve projects based on hints
- projects = self.resource_api.list_projects(hints)
-
- # Check that the returned list contains only the first project
- self.assertEqual(1, len(projects))
- self.assertEqual(project, projects[0])
-
- def test_list_projects_for_domain(self):
- project_ids = ([x['id'] for x in
- self.resource_api.list_projects_in_domain(
- CONF.identity.default_domain_id)])
- # Only the projects from the default fixtures are expected, since
- # filtering by domain does not include any project that acts as a
- # domain.
- self.assertThat(
- project_ids, matchers.HasLength(len(default_fixtures.TENANTS)))
- self.assertIn(self.tenant_bar['id'], project_ids)
- self.assertIn(self.tenant_baz['id'], project_ids)
- self.assertIn(self.tenant_mtu['id'], project_ids)
- self.assertIn(self.tenant_service['id'], project_ids)
-
- @unit.skip_if_no_multiple_domains_support
- def test_list_projects_acting_as_domain(self):
- initial_domains = self.resource_api.list_domains()
-
- # Creating 5 projects that act as domains
- new_projects_acting_as_domains = []
- for i in range(5):
- project = unit.new_project_ref(is_domain=True)
- project = self.resource_api.create_project(project['id'], project)
- new_projects_acting_as_domains.append(project)
-
- # Creating a few regular project to ensure it doesn't mess with the
- # ones that act as domains
- self._create_projects_hierarchy(hierarchy_size=2)
-
- projects = self.resource_api.list_projects_acting_as_domain()
- expected_number_projects = (
- len(initial_domains) + len(new_projects_acting_as_domains))
- self.assertEqual(expected_number_projects, len(projects))
- for project in new_projects_acting_as_domains:
- self.assertIn(project, projects)
- for domain in initial_domains:
- self.assertIn(domain['id'], [p['id'] for p in projects])
-
- @unit.skip_if_no_multiple_domains_support
- def test_list_projects_for_alternate_domain(self):
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- project1 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project2['id'], project2)
- project_ids = ([x['id'] for x in
- self.resource_api.list_projects_in_domain(
- domain1['id'])])
- self.assertEqual(2, len(project_ids))
- self.assertIn(project1['id'], project_ids)
- self.assertIn(project2['id'], project_ids)
-
- def _create_projects_hierarchy(self, hierarchy_size=2,
- domain_id=None,
- is_domain=False,
- parent_project_id=None):
- """Creates a project hierarchy with specified size.
-
- :param hierarchy_size: the desired hierarchy size, default is 2 -
- a project with one child.
- :param domain_id: domain where the projects hierarchy will be created.
- :param is_domain: if the hierarchy will have the is_domain flag active
- or not.
- :param parent_project_id: if the intention is to create a
- sub-hierarchy, sets the sub-hierarchy root. Defaults to creating
- a new hierarchy, i.e. a new root project.
-
- :returns projects: a list of the projects in the created hierarchy.
-
- """
- if domain_id is None:
- domain_id = CONF.identity.default_domain_id
- if parent_project_id:
- project = unit.new_project_ref(parent_id=parent_project_id,
- domain_id=domain_id,
- is_domain=is_domain)
- else:
- project = unit.new_project_ref(domain_id=domain_id,
- is_domain=is_domain)
- project_id = project['id']
- project = self.resource_api.create_project(project_id, project)
-
- projects = [project]
- for i in range(1, hierarchy_size):
- new_project = unit.new_project_ref(parent_id=project_id,
- domain_id=domain_id)
-
- self.resource_api.create_project(new_project['id'], new_project)
- projects.append(new_project)
- project_id = new_project['id']
-
- return projects
-
- @unit.skip_if_no_multiple_domains_support
- def test_create_domain_with_project_api(self):
- project = unit.new_project_ref(is_domain=True)
- ref = self.resource_api.create_project(project['id'], project)
- self.assertTrue(ref['is_domain'])
- self.resource_api.get_domain(ref['id'])
-
- @unit.skip_if_no_multiple_domains_support
- def test_project_as_a_domain_uniqueness_constraints(self):
- """Tests project uniqueness for those acting as domains.
-
- If it is a project acting as a domain, we can't have two or more with
- the same name.
-
- """
- # Create two projects acting as a domain
- project = unit.new_project_ref(is_domain=True)
- project = self.resource_api.create_project(project['id'], project)
- project2 = unit.new_project_ref(is_domain=True)
- project2 = self.resource_api.create_project(project2['id'], project2)
-
- # All projects acting as domains have a null domain_id, so should not
- # be able to create another with the same name but a different
- # project ID.
- new_project = project.copy()
- new_project['id'] = uuid.uuid4().hex
-
- self.assertRaises(exception.Conflict,
- self.resource_api.create_project,
- new_project['id'],
- new_project)
-
- # We also should not be able to update one to have a name clash
- project2['name'] = project['name']
- self.assertRaises(exception.Conflict,
- self.resource_api.update_project,
- project2['id'],
- project2)
-
- # But updating it to a unique name is OK
- project2['name'] = uuid.uuid4().hex
- self.resource_api.update_project(project2['id'], project2)
-
- # Finally, it should be OK to create a project with same name as one of
- # these acting as a domain, as long as it is a regular project
- project3 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id, name=project2['name'])
- self.resource_api.create_project(project3['id'], project3)
- # In fact, it should be OK to create such a project in the domain which
- # has the matching name.
- # TODO(henry-nash): Once we fully support projects acting as a domain,
- # add a test here to create a sub-project with a name that matches its
- # project acting as a domain
-
- @unit.skip_if_no_multiple_domains_support
- @test_utils.wip('waiting for sub projects acting as domains support')
- def test_is_domain_sub_project_has_parent_domain_id(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id, is_domain=True)
- self.resource_api.create_project(project['id'], project)
-
- sub_project = unit.new_project_ref(domain_id=project['id'],
- parent_id=project['id'],
- is_domain=True)
-
- ref = self.resource_api.create_project(sub_project['id'], sub_project)
- self.assertTrue(ref['is_domain'])
- self.assertEqual(project['id'], ref['parent_id'])
- self.assertEqual(project['id'], ref['domain_id'])
-
- @unit.skip_if_no_multiple_domains_support
- def test_delete_domain_with_project_api(self):
- project = unit.new_project_ref(domain_id=None,
- is_domain=True)
- self.resource_api.create_project(project['id'], project)
-
- # Check that a corresponding domain was created
- self.resource_api.get_domain(project['id'])
-
- # Try to delete the enabled project that acts as a domain
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.delete_project,
- project['id'])
-
- # Disable the project
- project['enabled'] = False
- self.resource_api.update_project(project['id'], project)
-
- # Successfully delete the project
- self.resource_api.delete_project(project['id'])
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
-
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- project['id'])
-
- @unit.skip_if_no_multiple_domains_support
- def test_create_subproject_acting_as_domain_fails(self):
- root_project = unit.new_project_ref(is_domain=True)
- self.resource_api.create_project(root_project['id'], root_project)
-
- sub_project = unit.new_project_ref(is_domain=True,
- parent_id=root_project['id'])
-
- # Creation of sub projects acting as domains is not allowed yet
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- sub_project['id'], sub_project)
-
- @unit.skip_if_no_multiple_domains_support
- def test_create_domain_under_regular_project_hierarchy_fails(self):
- # Projects acting as domains can't have a regular project as parent
- projects_hierarchy = self._create_projects_hierarchy()
- parent = projects_hierarchy[1]
- project = unit.new_project_ref(domain_id=parent['id'],
- parent_id=parent['id'],
- is_domain=True)
-
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'], project)
-
- @unit.skip_if_no_multiple_domains_support
- @test_utils.wip('waiting for sub projects acting as domains support')
- def test_create_project_under_domain_hierarchy(self):
- projects_hierarchy = self._create_projects_hierarchy(is_domain=True)
- parent = projects_hierarchy[1]
- project = unit.new_project_ref(domain_id=parent['id'],
- parent_id=parent['id'],
- is_domain=False)
-
- ref = self.resource_api.create_project(project['id'], project)
- self.assertFalse(ref['is_domain'])
- self.assertEqual(parent['id'], ref['parent_id'])
- self.assertEqual(parent['id'], ref['domain_id'])
-
- def test_create_project_without_is_domain_flag(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- del project['is_domain']
- ref = self.resource_api.create_project(project['id'], project)
- # The is_domain flag should be False by default
- self.assertFalse(ref['is_domain'])
-
- @unit.skip_if_no_multiple_domains_support
- def test_create_project_passing_is_domain_flag_true(self):
- project = unit.new_project_ref(is_domain=True)
-
- ref = self.resource_api.create_project(project['id'], project)
- self.assertTrue(ref['is_domain'])
-
- def test_create_project_passing_is_domain_flag_false(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id, is_domain=False)
-
- ref = self.resource_api.create_project(project['id'], project)
- self.assertIs(False, ref['is_domain'])
-
- @test_utils.wip('waiting for support for parent_id to imply domain_id')
- def test_create_project_with_parent_id_and_without_domain_id(self):
- # First create a domain
- project = unit.new_project_ref(is_domain=True)
- self.resource_api.create_project(project['id'], project)
- # Now create a child by just naming the parent_id
- sub_project = unit.new_project_ref(parent_id=project['id'])
- ref = self.resource_api.create_project(sub_project['id'], sub_project)
-
- # The domain_id should be set to the parent domain_id
- self.assertEqual(project['domain_id'], ref['domain_id'])
-
- def test_create_project_with_domain_id_and_without_parent_id(self):
- # First create a domain
- project = unit.new_project_ref(is_domain=True)
- self.resource_api.create_project(project['id'], project)
- # Now create a child by just naming the domain_id
- sub_project = unit.new_project_ref(domain_id=project['id'])
- ref = self.resource_api.create_project(sub_project['id'], sub_project)
-
- # The parent_id and domain_id should be set to the id of the project
- # acting as a domain
- self.assertEqual(project['id'], ref['parent_id'])
- self.assertEqual(project['id'], ref['domain_id'])
-
- def test_create_project_with_domain_id_mismatch_to_parent_domain(self):
- # First create a domain
- project = unit.new_project_ref(is_domain=True)
- self.resource_api.create_project(project['id'], project)
- # Now try to create a child with the above as its parent, but
- # specifying a different domain.
- sub_project = unit.new_project_ref(
- parent_id=project['id'], domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- sub_project['id'], sub_project)
-
- def test_check_leaf_projects(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
- leaf_project = projects_hierarchy[1]
-
- self.assertFalse(self.resource_api.is_leaf_project(
- root_project['id']))
- self.assertTrue(self.resource_api.is_leaf_project(
- leaf_project['id']))
-
- # Delete leaf_project
- self.resource_api.delete_project(leaf_project['id'])
-
- # Now, root_project should be leaf
- self.assertTrue(self.resource_api.is_leaf_project(
- root_project['id']))
-
- def test_list_projects_in_subtree(self):
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- project1 = projects_hierarchy[0]
- project2 = projects_hierarchy[1]
- project3 = projects_hierarchy[2]
- project4 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id,
- parent_id=project2['id'])
- self.resource_api.create_project(project4['id'], project4)
-
- subtree = self.resource_api.list_projects_in_subtree(project1['id'])
- self.assertEqual(3, len(subtree))
- self.assertIn(project2, subtree)
- self.assertIn(project3, subtree)
- self.assertIn(project4, subtree)
-
- subtree = self.resource_api.list_projects_in_subtree(project2['id'])
- self.assertEqual(2, len(subtree))
- self.assertIn(project3, subtree)
- self.assertIn(project4, subtree)
-
- subtree = self.resource_api.list_projects_in_subtree(project3['id'])
- self.assertEqual(0, len(subtree))
-
- def test_list_projects_in_subtree_with_circular_reference(self):
- project1 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project1 = self.resource_api.create_project(project1['id'], project1)
-
- project2 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id,
- parent_id=project1['id'])
- self.resource_api.create_project(project2['id'], project2)
-
- project1['parent_id'] = project2['id'] # Adds cyclic reference
-
- # NOTE(dstanek): The manager does not allow parent_id to be updated.
- # Instead will directly use the driver to create the cyclic
- # reference.
- self.resource_api.driver.update_project(project1['id'], project1)
-
- subtree = self.resource_api.list_projects_in_subtree(project1['id'])
-
- # NOTE(dstanek): If a cyclic reference is detected the code bails
- # and returns None instead of falling into the infinite
- # recursion trap.
- self.assertIsNone(subtree)
-
- def test_list_projects_in_subtree_invalid_project_id(self):
- self.assertRaises(exception.ValidationError,
- self.resource_api.list_projects_in_subtree,
- None)
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.list_projects_in_subtree,
- uuid.uuid4().hex)
-
- def test_list_project_parents(self):
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- project1 = projects_hierarchy[0]
- project2 = projects_hierarchy[1]
- project3 = projects_hierarchy[2]
- project4 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id,
- parent_id=project2['id'])
- self.resource_api.create_project(project4['id'], project4)
-
- parents1 = self.resource_api.list_project_parents(project3['id'])
- self.assertEqual(3, len(parents1))
- self.assertIn(project1, parents1)
- self.assertIn(project2, parents1)
-
- parents2 = self.resource_api.list_project_parents(project4['id'])
- self.assertEqual(parents1, parents2)
-
- parents = self.resource_api.list_project_parents(project1['id'])
- # It has the default domain as parent
- self.assertEqual(1, len(parents))
-
- def test_update_project_enabled_cascade(self):
- """Test update_project_cascade
-
- Ensures the enabled attribute is correctly updated across
- a simple 3-level projects hierarchy.
- """
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- parent = projects_hierarchy[0]
-
- # Disable in parent project disables the whole subtree
- parent['enabled'] = False
- # Store the ref from backend in another variable so we don't bother
- # to remove other attributes that were not originally provided and
- # were set in the manager, like parent_id and domain_id.
- parent_ref = self.resource_api.update_project(parent['id'],
- parent,
- cascade=True)
-
- subtree = self.resource_api.list_projects_in_subtree(parent['id'])
- self.assertEqual(2, len(subtree))
- self.assertFalse(parent_ref['enabled'])
- self.assertFalse(subtree[0]['enabled'])
- self.assertFalse(subtree[1]['enabled'])
-
- # Enable parent project enables the whole subtree
- parent['enabled'] = True
- parent_ref = self.resource_api.update_project(parent['id'],
- parent,
- cascade=True)
-
- subtree = self.resource_api.list_projects_in_subtree(parent['id'])
- self.assertEqual(2, len(subtree))
- self.assertTrue(parent_ref['enabled'])
- self.assertTrue(subtree[0]['enabled'])
- self.assertTrue(subtree[1]['enabled'])
-
- def test_cannot_enable_cascade_with_parent_disabled(self):
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- grandparent = projects_hierarchy[0]
- parent = projects_hierarchy[1]
-
- grandparent['enabled'] = False
- self.resource_api.update_project(grandparent['id'],
- grandparent,
- cascade=True)
- subtree = self.resource_api.list_projects_in_subtree(parent['id'])
- self.assertFalse(subtree[0]['enabled'])
-
- parent['enabled'] = True
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.update_project,
- parent['id'],
- parent,
- cascade=True)
-
- def test_update_cascade_only_accepts_enabled(self):
- # Update cascade does not accept any other attribute but 'enabled'
- new_project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(new_project['id'], new_project)
-
- new_project['name'] = 'project1'
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- new_project['id'],
- new_project,
- cascade=True)
-
- def test_list_project_parents_invalid_project_id(self):
- self.assertRaises(exception.ValidationError,
- self.resource_api.list_project_parents,
- None)
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.list_project_parents,
- uuid.uuid4().hex)
-
- def test_create_project_doesnt_modify_passed_in_dict(self):
- new_project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- original_project = new_project.copy()
- self.resource_api.create_project(new_project['id'], new_project)
- self.assertDictEqual(original_project, new_project)
-
- def test_update_project_enable(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertTrue(project_ref['enabled'])
-
- project['enabled'] = False
- self.resource_api.update_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertEqual(project['enabled'], project_ref['enabled'])
-
- # If not present, enabled field should not be updated
- del project['enabled']
- self.resource_api.update_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertFalse(project_ref['enabled'])
-
- project['enabled'] = True
- self.resource_api.update_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertEqual(project['enabled'], project_ref['enabled'])
-
- del project['enabled']
- self.resource_api.update_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertTrue(project_ref['enabled'])
-
- def test_create_invalid_domain_fails(self):
- new_group = unit.new_group_ref(domain_id="doesnotexist")
- self.assertRaises(exception.DomainNotFound,
- self.identity_api.create_group,
- new_group)
- new_user = unit.new_user_ref(domain_id="doesnotexist")
- self.assertRaises(exception.DomainNotFound,
- self.identity_api.create_user,
- new_user)
-
- @unit.skip_if_no_multiple_domains_support
- def test_project_crud(self):
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- project = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictContainsSubset(project, project_ref)
-
- project['name'] = uuid.uuid4().hex
- self.resource_api.update_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictContainsSubset(project, project_ref)
-
- self.resource_api.delete_project(project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
-
- def test_domain_delete_hierarchy(self):
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
-
- # Creating a root and a leaf project inside the domain
- projects_hierarchy = self._create_projects_hierarchy(
- domain_id=domain['id'])
- root_project = projects_hierarchy[0]
- leaf_project = projects_hierarchy[0]
-
- # Disable the domain
- domain['enabled'] = False
- self.resource_api.update_domain(domain['id'], domain)
-
- # Delete the domain
- self.resource_api.delete_domain(domain['id'])
-
- # Make sure the domain no longer exists
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
-
- # Make sure the root project no longer exists
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- root_project['id'])
-
- # Make sure the leaf project no longer exists
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- leaf_project['id'])
-
- def test_delete_projects_from_ids(self):
- """Tests the resource backend call delete_projects_from_ids.
-
- Tests the normal flow of the delete_projects_from_ids backend call,
- that ensures no project on the list exists after it is succesfully
- called.
- """
- project1_ref = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project2_ref = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- projects = (project1_ref, project2_ref)
- for project in projects:
- self.resource_api.create_project(project['id'], project)
-
- # Setting up the ID's list
- projects_ids = [p['id'] for p in projects]
- self.resource_api.driver.delete_projects_from_ids(projects_ids)
-
- # Ensuring projects no longer exist at backend level
- for project_id in projects_ids:
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.driver.get_project,
- project_id)
-
- # Passing an empty list is silently ignored
- self.resource_api.driver.delete_projects_from_ids([])
-
- def test_delete_projects_from_ids_with_no_existing_project_id(self):
- """Tests delete_projects_from_ids issues warning if not found.
-
- Tests the resource backend call delete_projects_from_ids passing a
- non existing ID in project_ids, which is logged and ignored by
- the backend.
- """
- project_ref = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project_ref['id'], project_ref)
-
- # Setting up the ID's list
- projects_ids = (project_ref['id'], uuid.uuid4().hex)
- with mock.patch('keystone.resource.backends.sql.LOG') as mock_log:
- self.resource_api.delete_projects_from_ids(projects_ids)
- self.assertTrue(mock_log.warning.called)
- # The existing project was deleted.
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.driver.get_project,
- project_ref['id'])
-
- # Even if we only have one project, and it does not exist, it returns
- # no error.
- self.resource_api.driver.delete_projects_from_ids([uuid.uuid4().hex])
-
- def test_delete_project_cascade(self):
- # create a hierarchy with 3 levels
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- root_project = projects_hierarchy[0]
- project1 = projects_hierarchy[1]
- project2 = projects_hierarchy[2]
-
- # Disabling all projects before attempting to delete
- for project in (project2, project1, root_project):
- project['enabled'] = False
- self.resource_api.update_project(project['id'], project)
-
- self.resource_api.delete_project(root_project['id'], cascade=True)
-
- for project in projects_hierarchy:
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
-
- def test_delete_large_project_cascade(self):
- """Try delete a large project with cascade true.
-
- Tree we will create::
-
- +-p1-+
- | |
- p5 p2
- | |
- p6 +-p3-+
- | |
- p7 p4
- """
- # create a hierarchy with 4 levels
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=4)
- p1 = projects_hierarchy[0]
- # Add the left branch to the hierarchy (p5, p6)
- self._create_projects_hierarchy(hierarchy_size=2,
- parent_project_id=p1['id'])
- # Add p7 to the hierarchy
- p3_id = projects_hierarchy[2]['id']
- self._create_projects_hierarchy(hierarchy_size=1,
- parent_project_id=p3_id)
- # Reverse the hierarchy to disable the leaf first
- prjs_hierarchy = ([p1] + self.resource_api.list_projects_in_subtree(
- p1['id']))[::-1]
-
- # Disabling all projects before attempting to delete
- for project in prjs_hierarchy:
- project['enabled'] = False
- self.resource_api.update_project(project['id'], project)
-
- self.resource_api.delete_project(p1['id'], cascade=True)
- for project in prjs_hierarchy:
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
-
- def test_cannot_delete_project_cascade_with_enabled_child(self):
- # create a hierarchy with 3 levels
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- root_project = projects_hierarchy[0]
- project1 = projects_hierarchy[1]
- project2 = projects_hierarchy[2]
-
- project2['enabled'] = False
- self.resource_api.update_project(project2['id'], project2)
-
- # Cannot cascade delete root_project, since project1 is enabled
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.delete_project,
- root_project['id'],
- cascade=True)
-
- # Ensuring no project was deleted, not even project2
- self.resource_api.get_project(root_project['id'])
- self.resource_api.get_project(project1['id'])
- self.resource_api.get_project(project2['id'])
-
- def test_hierarchical_projects_crud(self):
- # create a hierarchy with just a root project (which is a leaf as well)
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=1)
- root_project1 = projects_hierarchy[0]
-
- # create a hierarchy with one root project and one leaf project
- projects_hierarchy = self._create_projects_hierarchy()
- root_project2 = projects_hierarchy[0]
- leaf_project = projects_hierarchy[1]
-
- # update description from leaf_project
- leaf_project['description'] = 'new description'
- self.resource_api.update_project(leaf_project['id'], leaf_project)
- proj_ref = self.resource_api.get_project(leaf_project['id'])
- self.assertDictEqual(leaf_project, proj_ref)
-
- # update the parent_id is not allowed
- leaf_project['parent_id'] = root_project1['id']
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.update_project,
- leaf_project['id'],
- leaf_project)
-
- # delete root_project1
- self.resource_api.delete_project(root_project1['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- root_project1['id'])
-
- # delete root_project2 is not allowed since it is not a leaf project
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.delete_project,
- root_project2['id'])
-
- def test_create_project_with_invalid_parent(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id, parent_id='fake')
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.create_project,
- project['id'],
- project)
-
- @unit.skip_if_no_multiple_domains_support
- def test_create_leaf_project_with_different_domain(self):
- root_project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(root_project['id'], root_project)
-
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- leaf_project = unit.new_project_ref(domain_id=domain['id'],
- parent_id=root_project['id'])
-
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- leaf_project['id'],
- leaf_project)
-
- def test_delete_hierarchical_leaf_project(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
- leaf_project = projects_hierarchy[1]
-
- self.resource_api.delete_project(leaf_project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- leaf_project['id'])
-
- self.resource_api.delete_project(root_project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- root_project['id'])
-
- def test_delete_hierarchical_not_leaf_project(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
-
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.delete_project,
- root_project['id'])
-
- def test_update_project_parent(self):
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- project1 = projects_hierarchy[0]
- project2 = projects_hierarchy[1]
- project3 = projects_hierarchy[2]
-
- # project2 is the parent from project3
- self.assertEqual(project3.get('parent_id'), project2['id'])
-
- # try to update project3 parent to parent1
- project3['parent_id'] = project1['id']
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.update_project,
- project3['id'],
- project3)
-
- def test_create_project_under_disabled_one(self):
- project1 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id, enabled=False)
- self.resource_api.create_project(project1['id'], project1)
-
- project2 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id,
- parent_id=project1['id'])
-
- # It's not possible to create a project under a disabled one in the
- # hierarchy
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project2['id'],
- project2)
-
- def test_disable_hierarchical_leaf_project(self):
- projects_hierarchy = self._create_projects_hierarchy()
- leaf_project = projects_hierarchy[1]
-
- leaf_project['enabled'] = False
- self.resource_api.update_project(leaf_project['id'], leaf_project)
-
- project_ref = self.resource_api.get_project(leaf_project['id'])
- self.assertEqual(leaf_project['enabled'], project_ref['enabled'])
-
- def test_disable_hierarchical_not_leaf_project(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
-
- root_project['enabled'] = False
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.update_project,
- root_project['id'],
- root_project)
-
- def test_enable_project_with_disabled_parent(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
- leaf_project = projects_hierarchy[1]
-
- # Disable leaf and root
- leaf_project['enabled'] = False
- self.resource_api.update_project(leaf_project['id'], leaf_project)
- root_project['enabled'] = False
- self.resource_api.update_project(root_project['id'], root_project)
-
- # Try to enable the leaf project, it's not possible since it has
- # a disabled parent
- leaf_project['enabled'] = True
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.update_project,
- leaf_project['id'],
- leaf_project)
-
- def _get_hierarchy_depth(self, project_id):
- return len(self.resource_api.list_project_parents(project_id)) + 1
-
- def test_check_hierarchy_depth(self):
- # Should be allowed to have a hierarchy of the max depth specified
- # in the config option plus one (to allow for the additional project
- # acting as a domain after an upgrade)
- projects_hierarchy = self._create_projects_hierarchy(
- CONF.max_project_tree_depth)
- leaf_project = projects_hierarchy[CONF.max_project_tree_depth - 1]
-
- depth = self._get_hierarchy_depth(leaf_project['id'])
- self.assertEqual(CONF.max_project_tree_depth + 1, depth)
-
- # Creating another project in the hierarchy shouldn't be allowed
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id,
- parent_id=leaf_project['id'])
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.create_project,
- project['id'],
- project)
-
- def test_project_update_missing_attrs_with_a_value(self):
- # Creating a project with no description attribute.
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- del project['description']
- project = self.resource_api.create_project(project['id'], project)
-
- # Add a description attribute.
- project['description'] = uuid.uuid4().hex
- self.resource_api.update_project(project['id'], project)
-
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project, project_ref)
-
- def test_project_update_missing_attrs_with_a_falsey_value(self):
- # Creating a project with no description attribute.
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- del project['description']
- project = self.resource_api.create_project(project['id'], project)
-
- # Add a description attribute.
- project['description'] = ''
- self.resource_api.update_project(project['id'], project)
-
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project, project_ref)
-
- def test_domain_crud(self):
- domain = unit.new_domain_ref()
- domain_ref = self.resource_api.create_domain(domain['id'], domain)
- self.assertDictEqual(domain, domain_ref)
- domain_ref = self.resource_api.get_domain(domain['id'])
- self.assertDictEqual(domain, domain_ref)
-
- domain['name'] = uuid.uuid4().hex
- domain_ref = self.resource_api.update_domain(domain['id'], domain)
- self.assertDictEqual(domain, domain_ref)
- domain_ref = self.resource_api.get_domain(domain['id'])
- self.assertDictEqual(domain, domain_ref)
-
- # Ensure an 'enabled' domain cannot be deleted
- self.assertRaises(exception.ForbiddenNotSecurity,
- self.resource_api.delete_domain,
- domain_id=domain['id'])
-
- # Disable the domain
- domain['enabled'] = False
- self.resource_api.update_domain(domain['id'], domain)
-
- # Delete the domain
- self.resource_api.delete_domain(domain['id'])
-
- # Make sure the domain no longer exists
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
-
- @unit.skip_if_no_multiple_domains_support
- def test_domain_name_case_sensitivity(self):
- # create a ref with a lowercase name
- domain_name = 'test_domain'
- ref = unit.new_domain_ref(name=domain_name)
-
- lower_case_domain = self.resource_api.create_domain(ref['id'], ref)
-
- # assign a new ID to the ref with the same name, but in uppercase
- ref['id'] = uuid.uuid4().hex
- ref['name'] = domain_name.upper()
- upper_case_domain = self.resource_api.create_domain(ref['id'], ref)
-
- # We can get each domain by name
- lower_case_domain_ref = self.resource_api.get_domain_by_name(
- domain_name)
- self.assertDictEqual(lower_case_domain, lower_case_domain_ref)
-
- upper_case_domain_ref = self.resource_api.get_domain_by_name(
- domain_name.upper())
- self.assertDictEqual(upper_case_domain, upper_case_domain_ref)
-
- def test_project_attribute_update(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project['id'], project)
-
- # pick a key known to be non-existent
- key = 'description'
-
- def assert_key_equals(value):
- project_ref = self.resource_api.update_project(
- project['id'], project)
- self.assertEqual(value, project_ref[key])
- project_ref = self.resource_api.get_project(project['id'])
- self.assertEqual(value, project_ref[key])
-
- def assert_get_key_is(value):
- project_ref = self.resource_api.update_project(
- project['id'], project)
- self.assertIs(project_ref.get(key), value)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertIs(project_ref.get(key), value)
-
- # add an attribute that doesn't exist, set it to a falsey value
- value = ''
- project[key] = value
- assert_key_equals(value)
-
- # set an attribute with a falsey value to null
- value = None
- project[key] = value
- assert_get_key_is(value)
-
- # do it again, in case updating from this situation is handled oddly
- value = None
- project[key] = value
- assert_get_key_is(value)
-
- # set a possibly-null value to a falsey value
- value = ''
- project[key] = value
- assert_key_equals(value)
-
- # set a falsey value to a truthy value
- value = uuid.uuid4().hex
- project[key] = value
- assert_key_equals(value)
-
- @unit.skip_if_cache_disabled('resource')
- @unit.skip_if_no_multiple_domains_support
- def test_domain_rename_invalidates_get_domain_by_name_cache(self):
- domain = unit.new_domain_ref()
- domain_id = domain['id']
- domain_name = domain['name']
- self.resource_api.create_domain(domain_id, domain)
- domain_ref = self.resource_api.get_domain_by_name(domain_name)
- domain_ref['name'] = uuid.uuid4().hex
- self.resource_api.update_domain(domain_id, domain_ref)
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain_by_name,
- domain_name)
-
- @unit.skip_if_cache_disabled('resource')
- def test_cache_layer_domain_crud(self):
- domain = unit.new_domain_ref()
- domain_id = domain['id']
- # Create Domain
- self.resource_api.create_domain(domain_id, domain)
- project_domain_ref = self.resource_api.get_project(domain_id)
- domain_ref = self.resource_api.get_domain(domain_id)
- updated_project_domain_ref = copy.deepcopy(project_domain_ref)
- updated_project_domain_ref['name'] = uuid.uuid4().hex
- updated_domain_ref = copy.deepcopy(domain_ref)
- updated_domain_ref['name'] = updated_project_domain_ref['name']
- # Update domain, bypassing resource api manager
- self.resource_api.driver.update_project(domain_id,
- updated_project_domain_ref)
- # Verify get_domain still returns the domain
- self.assertDictContainsSubset(
- domain_ref, self.resource_api.get_domain(domain_id))
- # Invalidate cache
- self.resource_api.get_domain.invalidate(self.resource_api,
- domain_id)
- # Verify get_domain returns the updated domain
- self.assertDictContainsSubset(
- updated_domain_ref, self.resource_api.get_domain(domain_id))
- # Update the domain back to original ref, using the assignment api
- # manager
- self.resource_api.update_domain(domain_id, domain_ref)
- self.assertDictContainsSubset(
- domain_ref, self.resource_api.get_domain(domain_id))
- # Make sure domain is 'disabled', bypass resource api manager
- project_domain_ref_disabled = project_domain_ref.copy()
- project_domain_ref_disabled['enabled'] = False
- self.resource_api.driver.update_project(domain_id,
- project_domain_ref_disabled)
- self.resource_api.driver.update_project(domain_id, {'enabled': False})
- # Delete domain, bypassing resource api manager
- self.resource_api.driver.delete_project(domain_id)
- # Verify get_domain still returns the domain
- self.assertDictContainsSubset(
- domain_ref, self.resource_api.get_domain(domain_id))
- # Invalidate cache
- self.resource_api.get_domain.invalidate(self.resource_api,
- domain_id)
- # Verify get_domain now raises DomainNotFound
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain, domain_id)
- # Recreate Domain
- self.resource_api.create_domain(domain_id, domain)
- self.resource_api.get_domain(domain_id)
- # Make sure domain is 'disabled', bypass resource api manager
- domain['enabled'] = False
- self.resource_api.driver.update_project(domain_id, domain)
- self.resource_api.driver.update_project(domain_id, {'enabled': False})
- # Delete domain
- self.resource_api.delete_domain(domain_id)
- # verify DomainNotFound raised
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain_id)
-
- @unit.skip_if_cache_disabled('resource')
- @unit.skip_if_no_multiple_domains_support
- def test_project_rename_invalidates_get_project_by_name_cache(self):
- domain = unit.new_domain_ref()
- project = unit.new_project_ref(domain_id=domain['id'])
- project_id = project['id']
- project_name = project['name']
- self.resource_api.create_domain(domain['id'], domain)
- # Create a project
- self.resource_api.create_project(project_id, project)
- self.resource_api.get_project_by_name(project_name, domain['id'])
- project['name'] = uuid.uuid4().hex
- self.resource_api.update_project(project_id, project)
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project_by_name,
- project_name,
- domain['id'])
-
- @unit.skip_if_cache_disabled('resource')
- @unit.skip_if_no_multiple_domains_support
- def test_cache_layer_project_crud(self):
- domain = unit.new_domain_ref()
- project = unit.new_project_ref(domain_id=domain['id'])
- project_id = project['id']
- self.resource_api.create_domain(domain['id'], domain)
- # Create a project
- self.resource_api.create_project(project_id, project)
- self.resource_api.get_project(project_id)
- updated_project = copy.deepcopy(project)
- updated_project['name'] = uuid.uuid4().hex
- # Update project, bypassing resource manager
- self.resource_api.driver.update_project(project_id,
- updated_project)
- # Verify get_project still returns the original project_ref
- self.assertDictContainsSubset(
- project, self.resource_api.get_project(project_id))
- # Invalidate cache
- self.resource_api.get_project.invalidate(self.resource_api,
- project_id)
- # Verify get_project now returns the new project
- self.assertDictContainsSubset(
- updated_project,
- self.resource_api.get_project(project_id))
- # Update project using the resource_api manager back to original
- self.resource_api.update_project(project['id'], project)
- # Verify get_project returns the original project_ref
- self.assertDictContainsSubset(
- project, self.resource_api.get_project(project_id))
- # Delete project bypassing resource
- self.resource_api.driver.delete_project(project_id)
- # Verify get_project still returns the project_ref
- self.assertDictContainsSubset(
- project, self.resource_api.get_project(project_id))
- # Invalidate cache
- self.resource_api.get_project.invalidate(self.resource_api,
- project_id)
- # Verify ProjectNotFound now raised
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project_id)
- # recreate project
- self.resource_api.create_project(project_id, project)
- self.resource_api.get_project(project_id)
- # delete project
- self.resource_api.delete_project(project_id)
- # Verify ProjectNotFound is raised
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project_id)
-
- @unit.skip_if_no_multiple_domains_support
- def test_get_default_domain_by_name(self):
- domain_name = 'default'
-
- domain = unit.new_domain_ref(name=domain_name)
- self.resource_api.create_domain(domain['id'], domain)
-
- domain_ref = self.resource_api.get_domain_by_name(domain_name)
- self.assertEqual(domain, domain_ref)
-
- def test_get_not_default_domain_by_name(self):
- domain_name = 'foo'
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain_by_name,
- domain_name)
-
- def test_project_update_and_project_get_return_same_response(self):
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
-
- self.resource_api.create_project(project['id'], project)
-
- updated_project = {'enabled': False}
- updated_project_ref = self.resource_api.update_project(
- project['id'], updated_project)
-
- # SQL backend adds 'extra' field
- updated_project_ref.pop('extra', None)
-
- self.assertIs(False, updated_project_ref['enabled'])
-
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(updated_project_ref, project_ref)
-
-
-class ResourceDriverTests(object):
- """Tests for the resource driver.
-
- Subclasses must set self.driver to the driver instance.
-
- """
-
- def test_create_project(self):
- project_id = uuid.uuid4().hex
- project = {
- 'name': uuid.uuid4().hex,
- 'id': project_id,
- 'domain_id': uuid.uuid4().hex,
- }
- self.driver.create_project(project_id, project)
-
- def test_create_project_all_defined_properties(self):
- project_id = uuid.uuid4().hex
- project = {
- 'name': uuid.uuid4().hex,
- 'id': project_id,
- 'domain_id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True,
- 'parent_id': uuid.uuid4().hex,
- 'is_domain': True,
- }
- self.driver.create_project(project_id, project)
-
- def test_create_project_null_domain(self):
- project_id = uuid.uuid4().hex
- project = {
- 'name': uuid.uuid4().hex,
- 'id': project_id,
- 'domain_id': None,
- }
- self.driver.create_project(project_id, project)
-
- def test_create_project_same_name_same_domain_conflict(self):
- name = uuid.uuid4().hex
- domain_id = uuid.uuid4().hex
-
- project_id = uuid.uuid4().hex
- project = {
- 'name': name,
- 'id': project_id,
- 'domain_id': domain_id,
- }
- self.driver.create_project(project_id, project)
-
- project_id = uuid.uuid4().hex
- project = {
- 'name': name,
- 'id': project_id,
- 'domain_id': domain_id,
- }
- self.assertRaises(exception.Conflict, self.driver.create_project,
- project_id, project)
-
- def test_create_project_same_id_conflict(self):
- project_id = uuid.uuid4().hex
-
- project = {
- 'name': uuid.uuid4().hex,
- 'id': project_id,
- 'domain_id': uuid.uuid4().hex,
- }
- self.driver.create_project(project_id, project)
-
- project = {
- 'name': uuid.uuid4().hex,
- 'id': project_id,
- 'domain_id': uuid.uuid4().hex,
- }
- self.assertRaises(exception.Conflict, self.driver.create_project,
- project_id, project)
diff --git a/keystone-moon/keystone/tests/unit/resource/test_controllers.py b/keystone-moon/keystone/tests/unit/resource/test_controllers.py
deleted file mode 100644
index b8f247c8..00000000
--- a/keystone-moon/keystone/tests/unit/resource/test_controllers.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2016 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from oslo_config import cfg
-
-from keystone import exception
-from keystone.resource import controllers
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-
-
-CONF = cfg.CONF
-
-_ADMIN_CONTEXT = {'is_admin': True, 'query_string': {}}
-
-
-class TenantTestCaseNoDefaultDomain(unit.TestCase):
-
- def setUp(self):
- super(TenantTestCaseNoDefaultDomain, self).setUp()
- self.useFixture(database.Database())
- self.load_backends()
- self.tenant_controller = controllers.Tenant()
-
- def test_setup(self):
- # Other tests in this class assume there's no default domain, so make
- # sure the setUp worked as expected.
- self.assertRaises(
- exception.DomainNotFound,
- self.resource_api.get_domain, CONF.identity.default_domain_id)
-
- def test_get_all_projects(self):
- # When get_all_projects is done and there's no default domain, the
- # result is an empty list.
- res = self.tenant_controller.get_all_projects(_ADMIN_CONTEXT)
- self.assertEqual([], res['tenants'])
-
- def test_create_project(self):
- # When a project is created using the v2 controller and there's no
- # default domain, it doesn't fail with can't find domain (a default
- # domain is created)
- tenant = {'name': uuid.uuid4().hex}
- self.tenant_controller.create_project(_ADMIN_CONTEXT, tenant)
- # If the above doesn't fail then this is successful.
diff --git a/keystone-moon/keystone/tests/unit/resource/test_core.py b/keystone-moon/keystone/tests/unit/resource/test_core.py
deleted file mode 100644
index 2eb87e4c..00000000
--- a/keystone-moon/keystone/tests/unit/resource/test_core.py
+++ /dev/null
@@ -1,692 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-import mock
-from testtools import matchers
-
-from oslo_config import cfg
-from oslotest import mockpatch
-
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-
-
-CONF = cfg.CONF
-
-
-class TestResourceManagerNoFixtures(unit.SQLDriverOverrides, unit.TestCase):
-
- def setUp(self):
- super(TestResourceManagerNoFixtures, self).setUp()
- self.useFixture(database.Database(self.sql_driver_version_overrides))
- self.load_backends()
-
- def test_ensure_default_domain_exists(self):
- # When there's no default domain, ensure_default_domain_exists creates
- # it.
-
- # First make sure there's no default domain.
- self.assertRaises(
- exception.DomainNotFound,
- self.resource_api.get_domain, CONF.identity.default_domain_id)
-
- self.resource_api.ensure_default_domain_exists()
- default_domain = self.resource_api.get_domain(
- CONF.identity.default_domain_id)
-
- expected_domain = {
- 'id': CONF.identity.default_domain_id,
- 'name': 'Default',
- 'enabled': True,
- 'description': 'Domain created automatically to support V2.0 '
- 'operations.',
- }
- self.assertEqual(expected_domain, default_domain)
-
- def test_ensure_default_domain_exists_already_exists(self):
- # When there's already a default domain, ensure_default_domain_exists
- # doesn't do anything.
-
- name = uuid.uuid4().hex
- description = uuid.uuid4().hex
- domain_attrs = {
- 'id': CONF.identity.default_domain_id,
- 'name': name,
- 'description': description,
- }
- self.resource_api.create_domain(CONF.identity.default_domain_id,
- domain_attrs)
-
- self.resource_api.ensure_default_domain_exists()
-
- default_domain = self.resource_api.get_domain(
- CONF.identity.default_domain_id)
-
- expected_domain = {
- 'id': CONF.identity.default_domain_id,
- 'name': name,
- 'enabled': True,
- 'description': description,
- }
-
- self.assertEqual(expected_domain, default_domain)
-
- def test_ensure_default_domain_exists_fails(self):
- # When there's an unexpected exception creating domain it's passed on.
-
- self.useFixture(mockpatch.PatchObject(
- self.resource_api, 'create_domain',
- side_effect=exception.UnexpectedError))
-
- self.assertRaises(exception.UnexpectedError,
- self.resource_api.ensure_default_domain_exists)
-
- def test_update_project_name_conflict(self):
- name = uuid.uuid4().hex
- description = uuid.uuid4().hex
- domain_attrs = {
- 'id': CONF.identity.default_domain_id,
- 'name': name,
- 'description': description,
- }
- domain = self.resource_api.create_domain(
- CONF.identity.default_domain_id, domain_attrs)
- project1 = unit.new_project_ref(domain_id=domain['id'],
- name=uuid.uuid4().hex)
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain['id'],
- name=uuid.uuid4().hex)
- project = self.resource_api.create_project(project2['id'], project2)
-
- self.assertRaises(exception.Conflict,
- self.resource_api.update_project,
- project['id'], {'name': project1['name']})
-
-
-class DomainConfigDriverTests(object):
-
- def _domain_config_crud(self, sensitive):
- domain = uuid.uuid4().hex
- group = uuid.uuid4().hex
- option = uuid.uuid4().hex
- value = uuid.uuid4().hex
- self.driver.create_config_option(
- domain, group, option, value, sensitive)
- res = self.driver.get_config_option(
- domain, group, option, sensitive)
- config = {'group': group, 'option': option, 'value': value}
- self.assertEqual(config, res)
-
- value = uuid.uuid4().hex
- self.driver.update_config_option(
- domain, group, option, value, sensitive)
- res = self.driver.get_config_option(
- domain, group, option, sensitive)
- config = {'group': group, 'option': option, 'value': value}
- self.assertEqual(config, res)
-
- self.driver.delete_config_options(
- domain, group, option, sensitive)
- self.assertRaises(exception.DomainConfigNotFound,
- self.driver.get_config_option,
- domain, group, option, sensitive)
- # ...and silent if we try to delete it again
- self.driver.delete_config_options(
- domain, group, option, sensitive)
-
- def test_whitelisted_domain_config_crud(self):
- self._domain_config_crud(sensitive=False)
-
- def test_sensitive_domain_config_crud(self):
- self._domain_config_crud(sensitive=True)
-
- def _list_domain_config(self, sensitive):
- """Test listing by combination of domain, group & option."""
- config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- # Put config2 in the same group as config1
- config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- config3 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': 100}
- domain = uuid.uuid4().hex
-
- for config in [config1, config2, config3]:
- self.driver.create_config_option(
- domain, config['group'], config['option'],
- config['value'], sensitive)
-
- # Try listing all items from a domain
- res = self.driver.list_config_options(
- domain, sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(3))
- for res_entry in res:
- self.assertIn(res_entry, [config1, config2, config3])
-
- # Try listing by domain and group
- res = self.driver.list_config_options(
- domain, group=config1['group'], sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(2))
- for res_entry in res:
- self.assertIn(res_entry, [config1, config2])
-
- # Try listing by domain, group and option
- res = self.driver.list_config_options(
- domain, group=config2['group'],
- option=config2['option'], sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(1))
- self.assertEqual(config2, res[0])
-
- def test_list_whitelisted_domain_config_crud(self):
- self._list_domain_config(False)
-
- def test_list_sensitive_domain_config_crud(self):
- self._list_domain_config(True)
-
- def _delete_domain_configs(self, sensitive):
- """Test deleting by combination of domain, group & option."""
- config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- # Put config2 and config3 in the same group as config1
- config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- config3 = {'group': config1['group'], 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- config4 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- domain = uuid.uuid4().hex
-
- for config in [config1, config2, config3, config4]:
- self.driver.create_config_option(
- domain, config['group'], config['option'],
- config['value'], sensitive)
-
- # Try deleting by domain, group and option
- res = self.driver.delete_config_options(
- domain, group=config2['group'],
- option=config2['option'], sensitive=sensitive)
- res = self.driver.list_config_options(
- domain, sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(3))
- for res_entry in res:
- self.assertIn(res_entry, [config1, config3, config4])
-
- # Try deleting by domain and group
- res = self.driver.delete_config_options(
- domain, group=config4['group'], sensitive=sensitive)
- res = self.driver.list_config_options(
- domain, sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(2))
- for res_entry in res:
- self.assertIn(res_entry, [config1, config3])
-
- # Try deleting all items from a domain
- res = self.driver.delete_config_options(
- domain, sensitive=sensitive)
- res = self.driver.list_config_options(
- domain, sensitive=sensitive)
- self.assertThat(res, matchers.HasLength(0))
-
- def test_delete_whitelisted_domain_configs(self):
- self._delete_domain_configs(False)
-
- def test_delete_sensitive_domain_configs(self):
- self._delete_domain_configs(True)
-
- def _create_domain_config_twice(self, sensitive):
- """Test conflict error thrown if create the same option twice."""
- config = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
- 'value': uuid.uuid4().hex}
- domain = uuid.uuid4().hex
-
- self.driver.create_config_option(
- domain, config['group'], config['option'],
- config['value'], sensitive=sensitive)
- self.assertRaises(exception.Conflict,
- self.driver.create_config_option,
- domain, config['group'], config['option'],
- config['value'], sensitive=sensitive)
-
- def test_create_whitelisted_domain_config_twice(self):
- self._create_domain_config_twice(False)
-
- def test_create_sensitive_domain_config_twice(self):
- self._create_domain_config_twice(True)
-
-
-class DomainConfigTests(object):
-
- def setUp(self):
- self.domain = unit.new_domain_ref()
- self.resource_api.create_domain(self.domain['id'], self.domain)
- self.addCleanup(self.clean_up_domain)
-
- def clean_up_domain(self):
- # NOTE(henry-nash): Deleting the domain will also delete any domain
- # configs for this domain.
- self.domain['enabled'] = False
- self.resource_api.update_domain(self.domain['id'], self.domain)
- self.resource_api.delete_domain(self.domain['id'])
- del self.domain
-
- def test_create_domain_config_including_sensitive_option(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- # password is sensitive, so check that the whitelisted portion and
- # the sensitive piece have been stored in the appropriate locations.
- res = self.domain_config_api.get_config(self.domain['id'])
- config_whitelisted = copy.deepcopy(config)
- config_whitelisted['ldap'].pop('password')
- self.assertEqual(config_whitelisted, res)
- res = self.domain_config_api.driver.get_config_option(
- self.domain['id'], 'ldap', 'password', sensitive=True)
- self.assertEqual(config['ldap']['password'], res['value'])
-
- # Finally, use the non-public API to get back the whole config
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(config, res)
-
- def test_get_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- res = self.domain_config_api.get_config(self.domain['id'],
- group='identity')
- config_partial = copy.deepcopy(config)
- config_partial.pop('ldap')
- self.assertEqual(config_partial, res)
- res = self.domain_config_api.get_config(
- self.domain['id'], group='ldap', option='user_tree_dn')
- self.assertEqual({'user_tree_dn': config['ldap']['user_tree_dn']}, res)
- # ...but we should fail to get a sensitive option
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.get_config, self.domain['id'],
- group='ldap', option='password')
-
- def test_delete_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- self.domain_config_api.delete_config(
- self.domain['id'], group='identity')
- config_partial = copy.deepcopy(config)
- config_partial.pop('identity')
- config_partial['ldap'].pop('password')
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(config_partial, res)
-
- self.domain_config_api.delete_config(
- self.domain['id'], group='ldap', option='url')
- config_partial = copy.deepcopy(config_partial)
- config_partial['ldap'].pop('url')
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(config_partial, res)
-
- def test_get_options_not_in_domain_config(self):
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.get_config, self.domain['id'])
- config = {'ldap': {'url': uuid.uuid4().hex}}
-
- self.domain_config_api.create_config(self.domain['id'], config)
-
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.get_config, self.domain['id'],
- group='identity')
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.get_config, self.domain['id'],
- group='ldap', option='user_tree_dn')
-
- def test_get_sensitive_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual({}, res)
- self.domain_config_api.create_config(self.domain['id'], config)
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(config, res)
-
- def test_update_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- # Try updating a group
- new_config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_filter': uuid.uuid4().hex}}
- res = self.domain_config_api.update_config(
- self.domain['id'], new_config, group='ldap')
- expected_config = copy.deepcopy(config)
- expected_config['ldap']['url'] = new_config['ldap']['url']
- expected_config['ldap']['user_filter'] = (
- new_config['ldap']['user_filter'])
- expected_full_config = copy.deepcopy(expected_config)
- expected_config['ldap'].pop('password')
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(expected_config, res)
- # The sensitive option should still exist
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(expected_full_config, res)
-
- # Try updating a single whitelisted option
- self.domain_config_api.delete_config(self.domain['id'])
- self.domain_config_api.create_config(self.domain['id'], config)
- new_config = {'url': uuid.uuid4().hex}
- res = self.domain_config_api.update_config(
- self.domain['id'], new_config, group='ldap', option='url')
-
- # Make sure whitelisted and full config is updated
- expected_whitelisted_config = copy.deepcopy(config)
- expected_whitelisted_config['ldap']['url'] = new_config['url']
- expected_full_config = copy.deepcopy(expected_whitelisted_config)
- expected_whitelisted_config['ldap'].pop('password')
- self.assertEqual(expected_whitelisted_config, res)
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(expected_whitelisted_config, res)
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(expected_full_config, res)
-
- # Try updating a single sensitive option
- self.domain_config_api.delete_config(self.domain['id'])
- self.domain_config_api.create_config(self.domain['id'], config)
- new_config = {'password': uuid.uuid4().hex}
- res = self.domain_config_api.update_config(
- self.domain['id'], new_config, group='ldap', option='password')
- # The whitelisted config should not have changed...
- expected_whitelisted_config = copy.deepcopy(config)
- expected_full_config = copy.deepcopy(config)
- expected_whitelisted_config['ldap'].pop('password')
- self.assertEqual(expected_whitelisted_config, res)
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(expected_whitelisted_config, res)
- expected_full_config['ldap']['password'] = new_config['password']
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- # ...but the sensitive piece should have.
- self.assertEqual(expected_full_config, res)
-
- def test_update_invalid_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- # An extra group, when specifying one group should fail
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config, group='ldap')
- # An extra option, when specifying one option should fail
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config['ldap'],
- group='ldap', option='url')
-
- # Now try the right number of groups/options, but just not
- # ones that are in the config provided
- config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config, group='identity')
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config['ldap'], group='ldap',
- option='url')
-
- # Now some valid groups/options, but just not ones that are in the
- # existing config
- config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- config_wrong_group = {'identity': {'driver': uuid.uuid4().hex}}
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.update_config,
- self.domain['id'], config_wrong_group,
- group='identity')
- config_wrong_option = {'url': uuid.uuid4().hex}
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.update_config,
- self.domain['id'], config_wrong_option,
- group='ldap', option='url')
-
- # And finally just some bad groups/options
- bad_group = uuid.uuid4().hex
- config = {bad_group: {'user': uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config, group=bad_group,
- option='user')
- bad_option = uuid.uuid4().hex
- config = {'ldap': {bad_option: uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.update_config,
- self.domain['id'], config, group='ldap',
- option=bad_option)
-
- def test_create_invalid_domain_config(self):
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], {})
- config = {uuid.uuid4().hex: uuid.uuid4().hex}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], config)
- config = {uuid.uuid4().hex: {uuid.uuid4().hex: uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], config)
- config = {'ldap': {uuid.uuid4().hex: uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], config)
- # Try an option that IS in the standard conf, but neither whitelisted
- # or marked as sensitive
- config = {'identity': {'user_tree_dn': uuid.uuid4().hex}}
- self.assertRaises(exception.InvalidDomainConfig,
- self.domain_config_api.create_config,
- self.domain['id'], config)
-
- def test_delete_invalid_partial_domain_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- # Try deleting a group not in the config
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.delete_config,
- self.domain['id'], group='identity')
- # Try deleting an option not in the config
- self.assertRaises(exception.DomainConfigNotFound,
- self.domain_config_api.delete_config,
- self.domain['id'],
- group='ldap', option='user_tree_dn')
-
- def test_sensitive_substitution_in_domain_config(self):
- # Create a config that contains a whitelisted option that requires
- # substitution of a sensitive option.
- config = {'ldap': {'url': 'my_url/%(password)s',
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
-
- # Read back the config with the internal method and ensure that the
- # substitution has taken place.
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- expected_url = (
- config['ldap']['url'] % {'password': config['ldap']['password']})
- self.assertEqual(expected_url, res['ldap']['url'])
-
- def test_invalid_sensitive_substitution_in_domain_config(self):
- """Check that invalid substitutions raise warnings."""
- mock_log = mock.Mock()
-
- invalid_option_config = {
- 'ldap': {'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
-
- for invalid_option in ['my_url/%(passssword)s',
- 'my_url/%(password',
- 'my_url/%(password)',
- 'my_url/%(password)d']:
- invalid_option_config['ldap']['url'] = invalid_option
- self.domain_config_api.create_config(
- self.domain['id'], invalid_option_config)
-
- with mock.patch('keystone.resource.core.LOG', mock_log):
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- mock_log.warning.assert_any_call(mock.ANY)
- self.assertEqual(
- invalid_option_config['ldap']['url'], res['ldap']['url'])
-
- def test_escaped_sequence_in_domain_config(self):
- """Check that escaped '%(' doesn't get interpreted."""
- mock_log = mock.Mock()
-
- escaped_option_config = {
- 'ldap': {'url': 'my_url/%%(password)s',
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
-
- self.domain_config_api.create_config(
- self.domain['id'], escaped_option_config)
-
- with mock.patch('keystone.resource.core.LOG', mock_log):
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertFalse(mock_log.warn.called)
- # The escaping '%' should have been removed
- self.assertEqual('my_url/%(password)s', res['ldap']['url'])
-
- @unit.skip_if_cache_disabled('domain_config')
- def test_cache_layer_get_sensitive_config(self):
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- # cache the result
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id'])
- self.assertEqual(config, res)
-
- # delete, bypassing domain config manager api
- self.domain_config_api.delete_config_options(self.domain['id'])
- self.domain_config_api.delete_config_options(self.domain['id'],
- sensitive=True)
-
- self.assertDictEqual(
- res, self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id']))
- self.domain_config_api.get_config_with_sensitive_info.invalidate(
- self.domain_config_api, self.domain['id'])
- self.assertDictEqual(
- {},
- self.domain_config_api.get_config_with_sensitive_info(
- self.domain['id']))
-
- def test_delete_domain_deletes_configs(self):
- """Test domain deletion clears the domain configs."""
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex}}
- self.domain_config_api.create_config(domain['id'], config)
-
- # Now delete the domain
- domain['enabled'] = False
- self.resource_api.update_domain(domain['id'], domain)
- self.resource_api.delete_domain(domain['id'])
-
- # Check domain configs have also been deleted
- self.assertRaises(
- exception.DomainConfigNotFound,
- self.domain_config_api.get_config,
- domain['id'])
-
- # The get_config_with_sensitive_info does not throw an exception if
- # the config is empty, it just returns an empty dict
- self.assertDictEqual(
- {},
- self.domain_config_api.get_config_with_sensitive_info(
- domain['id']))
-
- def test_config_registration(self):
- type = uuid.uuid4().hex
- self.domain_config_api.obtain_registration(
- self.domain['id'], type)
- self.domain_config_api.release_registration(
- self.domain['id'], type=type)
-
- # Make sure that once someone has it, nobody else can get it.
- # This includes the domain who already has it.
- self.domain_config_api.obtain_registration(
- self.domain['id'], type)
- self.assertFalse(
- self.domain_config_api.obtain_registration(
- self.domain['id'], type))
-
- # Make sure we can read who does have it
- self.assertEqual(
- self.domain['id'],
- self.domain_config_api.read_registration(type))
-
- # Make sure releasing it is silent if the domain specified doesn't
- # have the registration
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- self.domain_config_api.release_registration(
- domain2['id'], type=type)
-
- # If nobody has the type registered, then trying to read it should
- # raise ConfigRegistrationNotFound
- self.domain_config_api.release_registration(
- self.domain['id'], type=type)
- self.assertRaises(exception.ConfigRegistrationNotFound,
- self.domain_config_api.read_registration,
- type)
-
- # Finally check multiple registrations are cleared if you free the
- # registration without specifying the type
- type2 = uuid.uuid4().hex
- self.domain_config_api.obtain_registration(
- self.domain['id'], type)
- self.domain_config_api.obtain_registration(
- self.domain['id'], type2)
- self.domain_config_api.release_registration(self.domain['id'])
- self.assertRaises(exception.ConfigRegistrationNotFound,
- self.domain_config_api.read_registration,
- type)
- self.assertRaises(exception.ConfigRegistrationNotFound,
- self.domain_config_api.read_registration,
- type2)
diff --git a/keystone-moon/keystone/tests/unit/rest.py b/keystone-moon/keystone/tests/unit/rest.py
deleted file mode 100644
index 512c301d..00000000
--- a/keystone-moon/keystone/tests/unit/rest.py
+++ /dev/null
@@ -1,261 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_serialization import jsonutils
-from six.moves import http_client
-import webtest
-
-from keystone.auth import controllers as auth_controllers
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.ksfixtures import database
-
-
-class RestfulTestCase(unit.TestCase):
- """Performs restful tests against the WSGI app over HTTP.
-
- This class launches public & admin WSGI servers for every test, which can
- be accessed by calling ``public_request()`` or ``admin_request()``,
- respectfully.
-
- ``restful_request()`` and ``request()`` methods are also exposed if you
- need to bypass restful conventions or access HTTP details in your test
- implementation.
-
- Three new asserts are provided:
-
- * ``assertResponseSuccessful``: called automatically for every request
- unless an ``expected_status`` is provided
- * ``assertResponseStatus``: called instead of ``assertResponseSuccessful``,
- if an ``expected_status`` is provided
- * ``assertValidResponseHeaders``: validates that the response headers
- appear as expected
-
- Requests are automatically serialized according to the defined
- ``content_type``. Responses are automatically deserialized as well, and
- available in the ``response.body`` attribute. The original body content is
- available in the ``response.raw`` attribute.
-
- """
-
- # default content type to test
- content_type = 'json'
-
- def get_extensions(self):
- return None
-
- def setUp(self, app_conf='keystone'):
- super(RestfulTestCase, self).setUp()
-
- # Will need to reset the plug-ins
- self.addCleanup(setattr, auth_controllers, 'AUTH_METHODS', {})
-
- self.useFixture(database.Database(self.sql_driver_version_overrides))
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- self.public_app = webtest.TestApp(
- self.loadapp(app_conf, name='main'))
- self.addCleanup(delattr, self, 'public_app')
- self.admin_app = webtest.TestApp(
- self.loadapp(app_conf, name='admin'))
- self.addCleanup(delattr, self, 'admin_app')
-
- def request(self, app, path, body=None, headers=None, token=None,
- expected_status=None, **kwargs):
- if headers:
- headers = {str(k): str(v) for k, v in headers.items()}
- else:
- headers = {}
-
- if token:
- headers['X-Auth-Token'] = str(token)
-
- # sets environ['REMOTE_ADDR']
- kwargs.setdefault('remote_addr', 'localhost')
-
- response = app.request(path, headers=headers,
- status=expected_status, body=body,
- **kwargs)
-
- return response
-
- def assertResponseSuccessful(self, response):
- """Asserts that a status code lies inside the 2xx range.
-
- :param response: :py:class:`httplib.HTTPResponse` to be
- verified to have a status code between 200 and 299.
-
- example::
-
- self.assertResponseSuccessful(response)
- """
- self.assertTrue(
- response.status_code >= 200 and response.status_code <= 299,
- 'Status code %d is outside of the expected range (2xx)\n\n%s' %
- (response.status, response.body))
-
- def assertResponseStatus(self, response, expected_status):
- """Asserts a specific status code on the response.
-
- :param response: :py:class:`httplib.HTTPResponse`
- :param expected_status: The specific ``status`` result expected
-
- example::
-
- self.assertResponseStatus(response, http_client.NO_CONTENT)
- """
- self.assertEqual(
- expected_status, response.status_code,
- 'Status code %s is not %s, as expected\n\n%s' %
- (response.status_code, expected_status, response.body))
-
- def assertValidResponseHeaders(self, response):
- """Ensures that response headers appear as expected."""
- self.assertIn('X-Auth-Token', response.headers.get('Vary'))
-
- def assertValidErrorResponse(self, response,
- expected_status=http_client.BAD_REQUEST):
- """Verify that the error response is valid.
-
- Subclasses can override this function based on the expected response.
-
- """
- self.assertEqual(expected_status, response.status_code)
- error = response.result['error']
- self.assertEqual(response.status_code, error['code'])
- self.assertIsNotNone(error.get('title'))
-
- def _to_content_type(self, body, headers, content_type=None):
- """Attempt to encode JSON and XML automatically."""
- content_type = content_type or self.content_type
-
- if content_type == 'json':
- headers['Accept'] = 'application/json'
- if body:
- headers['Content-Type'] = 'application/json'
- # NOTE(davechen):dump the body to bytes since WSGI requires
- # the body of the response to be `Bytestrings`.
- # see pep-3333:
- # https://www.python.org/dev/peps/pep-3333/#a-note-on-string-types
- return jsonutils.dump_as_bytes(body)
-
- def _from_content_type(self, response, content_type=None):
- """Attempt to decode JSON and XML automatically, if detected."""
- content_type = content_type or self.content_type
-
- if response.body is not None and response.body.strip():
- # if a body is provided, a Content-Type is also expected
- header = response.headers.get('Content-Type')
- self.assertIn(content_type, header)
-
- if content_type == 'json':
- response.result = jsonutils.loads(response.body)
- else:
- response.result = response.body
-
- def restful_request(self, method='GET', headers=None, body=None,
- content_type=None, response_content_type=None,
- **kwargs):
- """Serializes/deserializes json as request/response body.
-
- .. WARNING::
-
- * Existing Accept header will be overwritten.
- * Existing Content-Type header will be overwritten.
-
- """
- # Initialize headers dictionary
- headers = {} if not headers else headers
-
- body = self._to_content_type(body, headers, content_type)
-
- # Perform the HTTP request/response
- response = self.request(method=method, headers=headers, body=body,
- **kwargs)
-
- response_content_type = response_content_type or content_type
- self._from_content_type(response, content_type=response_content_type)
-
- # we can save some code & improve coverage by always doing this
- if (method != 'HEAD' and
- response.status_code >= http_client.BAD_REQUEST):
- self.assertValidErrorResponse(response)
-
- # Contains the decoded response.body
- return response
-
- def _request(self, convert=True, **kwargs):
- if convert:
- response = self.restful_request(**kwargs)
- else:
- response = self.request(**kwargs)
-
- self.assertValidResponseHeaders(response)
- return response
-
- def public_request(self, **kwargs):
- return self._request(app=self.public_app, **kwargs)
-
- def admin_request(self, **kwargs):
- return self._request(app=self.admin_app, **kwargs)
-
- def _get_token(self, body):
- """Convenience method so that we can test authenticated requests."""
- r = self.public_request(method='POST', path='/v2.0/tokens', body=body)
- return self._get_token_id(r)
-
- def get_admin_token(self):
- return self._get_token({
- 'auth': {
- 'passwordCredentials': {
- 'username': self.user_reqadmin['name'],
- 'password': self.user_reqadmin['password']
- },
- 'tenantId': default_fixtures.SERVICE_TENANT_ID
- }
- })
-
- def get_unscoped_token(self):
- """Convenience method so that we can test authenticated requests."""
- return self._get_token({
- 'auth': {
- 'passwordCredentials': {
- 'username': self.user_foo['name'],
- 'password': self.user_foo['password'],
- },
- },
- })
-
- def get_scoped_token(self, tenant_id=None):
- """Convenience method so that we can test authenticated requests."""
- if not tenant_id:
- tenant_id = self.tenant_bar['id']
- return self._get_token({
- 'auth': {
- 'passwordCredentials': {
- 'username': self.user_foo['name'],
- 'password': self.user_foo['password'],
- },
- 'tenantId': tenant_id,
- },
- })
-
- def _get_token_id(self, r):
- """Helper method to return a token ID from a response.
-
- This needs to be overridden by child classes for on their content type.
-
- """
- raise NotImplementedError()
diff --git a/keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml b/keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml
deleted file mode 100644
index db235f7c..00000000
--- a/keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<ns0:EntityDescriptor xmlns:ns0="urn:oasis:names:tc:SAML:2.0:metadata" xmlns:ns1="http://www.w3.org/2000/09/xmldsig#" entityID="k2k.com/v3/OS-FEDERATION/idp" validUntil="2014-08-19T21:24:17.411289Z">
- <ns0:IDPSSODescriptor protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol">
- <ns0:KeyDescriptor use="signing">
- <ns1:KeyInfo>
- <ns1:X509Data>
- <ns1:X509Certificate>MIIDpTCCAo0CAREwDQYJKoZIhvcNAQEFBQAwgZ4xCjAIBgNVBAUTATUxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTESMBAGA1UEBxMJU3Vubnl2YWxlMRIwEAYDVQQKEwlPcGVuU3RhY2sxETAPBgNVBAsTCEtleXN0b25lMSUwIwYJKoZIhvcNAQkBFhZrZXlzdG9uZUBvcGVuc3RhY2sub3JnMRQwEgYDVQQDEwtTZWxmIFNpZ25lZDAgFw0xMzA3MDkxNjI1MDBaGA8yMDcyMDEwMTE2MjUwMFowgY8xCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTESMBAGA1UEBxMJU3Vubnl2YWxlMRIwEAYDVQQKEwlPcGVuU3RhY2sxETAPBgNVBAsTCEtleXN0b25lMSUwIwYJKoZIhvcNAQkBFhZrZXlzdG9uZUBvcGVuc3RhY2sub3JnMREwDwYDVQQDEwhLZXlzdG9uZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMTC6IdNd9Cg1DshcrT5gRVRF36nEmjSA9QWdik7B925PK70U4F6j4pz/5JL7plIo/8rJ4jJz9ccE7m0iA+IuABtEhEwXkG9rj47Oy0J4ZyDGSh2K1Bl78PA9zxXSzysUTSjBKdAh29dPYbJY7cgZJ0uC3AtfVceYiAOIi14SdFeZ0LZLDXBuLaqUmSMrmKwJ9wAMOCb/jbBP9/3Ycd0GYjlvrSBU4Bqb8/NHasyO4DpPN68OAoyD5r5jUtV8QZN03UjIsoux8e0lrL6+MVtJo0OfWvlSrlzS5HKSryY+uqqQEuxtZKpJM2MV85ujvjc8eDSChh2shhDjBem3FIlHKUCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAed9fHgdJrk+gZcO5gsqq6uURfDOuYD66GsSdZw4BqHjYAcnyWq2da+iw7Uxkqu7iLf2k4+Hu3xjDFrce479OwZkSnbXmqB7XspTGOuM8MgT7jB/ypKTOZ6qaZKSWK1Hta995hMrVVlhUNBLh0MPGqoVWYA4d7mblujgH9vp+4mpCciJagHks8K5FBmI+pobB+uFdSYDoRzX9LTpStspK4e3IoY8baILuGcdKimRNBv6ItG4hMrntAe1/nWMJyUu5rDTGf2V/vAaS0S/faJBwQSz1o38QHMTWHNspfwIdX3yMqI9u7/vYlz3rLy5WdBdUgZrZ3/VLmJTiJVZu5Owq4Q==
-</ns1:X509Certificate>
- </ns1:X509Data>
- </ns1:KeyInfo>
- </ns0:KeyDescriptor>
- </ns0:IDPSSODescriptor>
- <ns0:Organization>
- <ns0:OrganizationName xml:lang="en">openstack</ns0:OrganizationName>
- <ns0:OrganizationDisplayName xml:lang="en">openstack</ns0:OrganizationDisplayName>
- <ns0:OrganizationURL xml:lang="en">openstack</ns0:OrganizationURL>
- </ns0:Organization>
- <ns0:ContactPerson contactType="technical">
- <ns0:Company>openstack</ns0:Company>
- <ns0:GivenName>first</ns0:GivenName>
- <ns0:SurName>lastname</ns0:SurName>
- <ns0:EmailAddress>admin@example.com</ns0:EmailAddress>
- <ns0:TelephoneNumber>555-555-5555</ns0:TelephoneNumber>
- </ns0:ContactPerson>
-</ns0:EntityDescriptor>
diff --git a/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml b/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml
deleted file mode 100644
index 414ff9cf..00000000
--- a/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml
+++ /dev/null
@@ -1,69 +0,0 @@
-<ns0:Assertion xmlns:ns0="urn:oasis:names:tc:SAML:2.0:assertion" xmlns:ns1="http://www.w3.org/2000/09/xmldsig#" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ID="9a22528bfe194b2880edce5d60414d6a" IssueInstant="2014-08-19T10:53:57Z" Version="2.0">
- <ns0:Issuer Format="urn:oasis:names:tc:SAML:2.0:nameid-format:entity">https://acme.com/FIM/sps/openstack/saml20</ns0:Issuer>
- <ns1:Signature>
- <ns1:SignedInfo>
- <ns1:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#" />
- <ns1:SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1" />
- <ns1:Reference URI="#9a22528bfe194b2880edce5d60414d6a">
- <ns1:Transforms>
- <ns1:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature" />
- <ns1:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#" />
- </ns1:Transforms>
- <ns1:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1" />
- <ns1:DigestValue>Lem2TKyYt+/tJy2iSos1t0KxcJE=</ns1:DigestValue>
- </ns1:Reference>
- </ns1:SignedInfo>
- <ns1:SignatureValue>b//GXtGeCIJPFsMAHrx4+3yjrL4smSpRLXG9PB3TLMJvU4fx8n2PzK7+VbtWNbZG
-vSgbvbQR52jq77iyaRfQ2iELuFEY+YietLRi7hsitkJCEayPmU+BDlNIGuCXZjAy
-7tmtGFkLlZZJaom1jAzHfZ5JPjZdM5hvQwrhCI2Kzyk=</ns1:SignatureValue>
- <ns1:KeyInfo>
- <ns1:X509Data>
- <ns1:X509Certificate>MIICtjCCAh+gAwIBAgIJAJTeBUN2i9ZNMA0GCSqGSIb3DQEBBQUAME4xCzAJBgNV
-BAYTAkhSMQ8wDQYDVQQIEwZaYWdyZWIxITAfBgNVBAoTGE5la2Egb3JnYW5pemFj
-aWphIGQuby5vLjELMAkGA1UEAxMCQ0EwHhcNMTIxMjI4MTYwODA1WhcNMTQxMjI4
-MTYwODA1WjBvMQswCQYDVQQGEwJIUjEPMA0GA1UECBMGWmFncmViMQ8wDQYDVQQH
-EwZaYWdyZWIxITAfBgNVBAoTGE5la2Egb3JnYW5pemFjaWphIGQuby5vLjEbMBkG
-A1UEAxMSUHJvZ3JhbWVyc2thIGZpcm1hMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCB
-iQKBgQCgWApHV5cma0GY/v/vmwgciDQBgITcitx2rG0F+ghXtGiEJeK75VY7jQwE
-UFCbgV+AaOY2NQChK2FKec7Hss/5y+jbWfX2yVwX6TYcCwnOGXenz+cgx2Fwqpu3
-ncL6dYJMfdbKvojBaJQLJTaNjRJsZACButDsDtXDSH9QaRy+hQIDAQABo3sweTAJ
-BgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0
-aWZpY2F0ZTAdBgNVHQ4EFgQUSo9ThP/MOg8QIRWxoPo8qKR8O2wwHwYDVR0jBBgw
-FoAUAelckr4bx8MwZ7y+VlHE46Mbo+cwDQYJKoZIhvcNAQEFBQADgYEAy19Z7Z5/
-/MlWkogu41s0RxL9ffG60QQ0Y8hhDTmgHNx1itj0wT8pB7M4KVMbZ4hjjSFsfRq4
-Vj7jm6LwU0WtZ3HGl8TygTh8AAJvbLROnTjLL5MqI9d9pKvIIfZ2Qs3xmJ7JEv4H
-UHeBXxQq/GmfBv3l+V5ObQ+EHKnyDodLHCk=</ns1:X509Certificate>
- </ns1:X509Data>
- </ns1:KeyInfo>
- </ns1:Signature>
- <ns0:Subject>
- <ns0:NameID Format="urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress">test_user</ns0:NameID>
- <ns0:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
- <ns0:SubjectConfirmationData NotOnOrAfter="2014-08-19T11:53:57.243106Z" Recipient="http://beta.com/Shibboleth.sso/SAML2/POST" />
- </ns0:SubjectConfirmation>
- </ns0:Subject>
- <ns0:AuthnStatement AuthnInstant="2014-08-19T10:53:57Z" SessionIndex="4e3430a9f8b941e69c133293a7a960a1" SessionNotOnOrAfter="2014-08-19T11:53:57.243106Z">
- <ns0:AuthnContext>
- <ns0:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</ns0:AuthnContextClassRef>
- <ns0:AuthenticatingAuthority>https://acme.com/FIM/sps/openstack/saml20</ns0:AuthenticatingAuthority>
- </ns0:AuthnContext>
- </ns0:AuthnStatement>
- <ns0:AttributeStatement>
- <ns0:Attribute Name="openstack_user" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
- <ns0:AttributeValue xsi:type="xs:string">test_user</ns0:AttributeValue>
- </ns0:Attribute>
- <ns0:Attribute Name="openstack_user_domain" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
- <ns0:AttributeValue xsi:type="xs:string">user_domain</ns0:AttributeValue>
- </ns0:Attribute>
- <ns0:Attribute Name="openstack_roles" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
- <ns0:AttributeValue xsi:type="xs:string">admin</ns0:AttributeValue>
- <ns0:AttributeValue xsi:type="xs:string">member</ns0:AttributeValue>
- </ns0:Attribute>
- <ns0:Attribute Name="openstack_project" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
- <ns0:AttributeValue xsi:type="xs:string">development</ns0:AttributeValue>
- </ns0:Attribute>
- <ns0:Attribute Name="openstack_project_domain" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:uri">
- <ns0:AttributeValue xsi:type="xs:string">project_domain</ns0:AttributeValue>
- </ns0:Attribute>
- </ns0:AttributeStatement>
-</ns0:Assertion>
diff --git a/keystone-moon/keystone/tests/unit/schema/__init__.py b/keystone-moon/keystone/tests/unit/schema/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/schema/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/schema/v2.py b/keystone-moon/keystone/tests/unit/schema/v2.py
deleted file mode 100644
index ed260a00..00000000
--- a/keystone-moon/keystone/tests/unit/schema/v2.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import copy
-
-from keystone.common import validation
-from keystone.common.validation import parameter_types
-from keystone.common.validation import validators
-
-
-_project_properties = {
- 'id': parameter_types.id_string,
- 'name': parameter_types.name,
- 'enabled': parameter_types.boolean,
- 'description': validation.nullable(parameter_types.description),
-}
-
-_token_properties = {
- 'audit_ids': {
- 'type': 'array',
- 'items': {
- 'type': 'string',
- },
- 'minItems': 1,
- 'maxItems': 2,
- },
- 'id': {'type': 'string'},
- 'expires': {'type': 'string'},
- 'issued_at': {'type': 'string'},
- 'tenant': {
- 'type': 'object',
- 'properties': _project_properties,
- 'required': ['id', 'name', 'enabled'],
- 'additionalProperties': False,
- },
-}
-
-_role_properties = {
- 'name': parameter_types.name,
-}
-
-_user_properties = {
- 'id': parameter_types.id_string,
- 'name': parameter_types.name,
- 'username': parameter_types.name,
- 'roles': {
- 'type': 'array',
- 'items': {
- 'type': 'object',
- 'properties': _role_properties,
- 'required': ['name'],
- 'additionalProperties': False,
- },
- },
- 'roles_links': {
- 'type': 'array',
- 'maxItems': 0,
- },
-}
-
-_metadata_properties = {
- 'is_admin': {'type': 'integer'},
- 'roles': {
- 'type': 'array',
- 'items': {'type': 'string'},
- },
-}
-
-_endpoint_properties = {
- 'id': {'type': 'string'},
- 'adminURL': parameter_types.url,
- 'internalURL': parameter_types.url,
- 'publicURL': parameter_types.url,
- 'region': {'type': 'string'},
-}
-
-_service_properties = {
- 'type': {'type': 'string'},
- 'name': parameter_types.name,
- 'endpoints_links': {
- 'type': 'array',
- 'maxItems': 0,
- },
- 'endpoints': {
- 'type': 'array',
- 'minItems': 1,
- 'items': {
- 'type': 'object',
- 'properties': _endpoint_properties,
- 'required': ['id', 'publicURL'],
- 'additionalProperties': False,
- },
- },
-}
-
-_base_access_properties = {
- 'metadata': {
- 'type': 'object',
- 'properties': _metadata_properties,
- 'required': ['is_admin', 'roles'],
- 'additionalProperties': False,
- },
- 'serviceCatalog': {
- 'type': 'array',
- 'items': {
- 'type': 'object',
- 'properties': _service_properties,
- 'required': ['name', 'type', 'endpoints_links', 'endpoints'],
- 'additionalProperties': False,
- },
- },
- 'token': {
- 'type': 'object',
- 'properties': _token_properties,
- 'required': ['audit_ids', 'id', 'expires', 'issued_at'],
- 'additionalProperties': False,
- },
- 'user': {
- 'type': 'object',
- 'properties': _user_properties,
- 'required': ['id', 'name', 'username', 'roles', 'roles_links'],
- 'additionalProperties': False,
- },
-}
-
-_unscoped_access_properties = copy.deepcopy(_base_access_properties)
-unscoped_metadata = _unscoped_access_properties['metadata']
-unscoped_metadata['properties']['roles']['maxItems'] = 0
-_unscoped_access_properties['user']['properties']['roles']['maxItems'] = 0
-_unscoped_access_properties['serviceCatalog']['maxItems'] = 0
-
-_scoped_access_properties = copy.deepcopy(_base_access_properties)
-_scoped_access_properties['metadata']['properties']['roles']['minItems'] = 1
-_scoped_access_properties['serviceCatalog']['minItems'] = 1
-_scoped_access_properties['user']['properties']['roles']['minItems'] = 1
-
-base_token_schema = {
- 'type': 'object',
- 'required': ['metadata', 'user', 'serviceCatalog', 'token'],
- 'additionalProperties': False,
-}
-
-unscoped_token_schema = copy.deepcopy(base_token_schema)
-unscoped_token_schema['properties'] = _unscoped_access_properties
-
-scoped_token_schema = copy.deepcopy(base_token_schema)
-scoped_token_schema['properties'] = _scoped_access_properties
-
-# Validator objects
-unscoped_validator = validators.SchemaValidator(unscoped_token_schema)
-scoped_validator = validators.SchemaValidator(scoped_token_schema)
diff --git a/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py b/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
deleted file mode 100644
index 79065863..00000000
--- a/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
+++ /dev/null
@@ -1,1391 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-import mock
-from oslo_log import versionutils
-from six.moves import http_client
-from testtools import matchers
-
-from keystone.contrib.endpoint_filter import routers
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-
-
-class EndpointFilterTestCase(test_v3.RestfulTestCase):
-
- def config_overrides(self):
- super(EndpointFilterTestCase, self).config_overrides()
- self.config_fixture.config(
- group='catalog', driver='endpoint_filter.sql')
-
- def setUp(self):
- super(EndpointFilterTestCase, self).setUp()
- self.default_request_url = (
- '/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.default_domain_project_id,
- 'endpoint_id': self.endpoint_id})
-
-
-class EndpointFilterDeprecateTestCase(test_v3.RestfulTestCase):
-
- @mock.patch.object(versionutils, 'report_deprecated_feature')
- def test_exception_happens(self, mock_deprecator):
- routers.EndpointFilterExtension(mock.ANY)
- mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("Remove endpoint_filter_extension from", args[1])
-
-
-class EndpointFilterCRUDTestCase(EndpointFilterTestCase):
-
- def test_create_endpoint_project_association(self):
- """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Valid endpoint and project id test case.
-
- """
- self.put(self.default_request_url)
-
- def test_create_endpoint_project_association_with_invalid_project(self):
- """PUT OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Invalid project id test case.
-
- """
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': uuid.uuid4().hex,
- 'endpoint_id': self.endpoint_id},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_endpoint_project_association_with_invalid_endpoint(self):
- """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Invalid endpoint id test case.
-
- """
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.default_domain_project_id,
- 'endpoint_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_endpoint_project_association_with_unexpected_body(self):
- """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Unexpected body in request. The body should be ignored.
-
- """
- self.put(self.default_request_url,
- body={'project_id': self.default_domain_project_id})
-
- def test_check_endpoint_project_association(self):
- """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Valid project and endpoint id test case.
-
- """
- self.put(self.default_request_url)
- self.head('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.default_domain_project_id,
- 'endpoint_id': self.endpoint_id})
-
- def test_check_endpoint_project_association_with_invalid_project(self):
- """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Invalid project id test case.
-
- """
- self.put(self.default_request_url)
- self.head('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': uuid.uuid4().hex,
- 'endpoint_id': self.endpoint_id},
- expected_status=http_client.NOT_FOUND)
-
- def test_check_endpoint_project_association_with_invalid_endpoint(self):
- """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Invalid endpoint id test case.
-
- """
- self.put(self.default_request_url)
- self.head('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.default_domain_project_id,
- 'endpoint_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_list_endpoints_associated_with_valid_project(self):
- """GET /OS-EP-FILTER/projects/{project_id}/endpoints
-
- Valid project and endpoint id test case.
-
- """
- self.put(self.default_request_url)
- resource_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
- 'project_id': self.default_domain_project_id}
- r = self.get(resource_url)
- self.assertValidEndpointListResponse(r, self.endpoint,
- resource_url=resource_url)
-
- def test_list_endpoints_associated_with_invalid_project(self):
- """GET /OS-EP-FILTER/projects/{project_id}/endpoints
-
- Invalid project id test case.
-
- """
- self.put(self.default_request_url)
- self.get('/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
- 'project_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_list_projects_associated_with_endpoint(self):
- """GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects
-
- Valid endpoint-project association test case.
-
- """
- self.put(self.default_request_url)
- resource_url = '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % {
- 'endpoint_id': self.endpoint_id}
- r = self.get(resource_url)
- self.assertValidProjectListResponse(r, self.default_domain_project,
- resource_url=resource_url)
-
- def test_list_projects_with_no_endpoint_project_association(self):
- """GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects
-
- Valid endpoint id but no endpoint-project associations test case.
-
- """
- r = self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
- {'endpoint_id': self.endpoint_id})
- self.assertValidProjectListResponse(r, expected_length=0)
-
- def test_list_projects_associated_with_invalid_endpoint(self):
- """GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects
-
- Invalid endpoint id test case.
-
- """
- self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
- {'endpoint_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_remove_endpoint_project_association(self):
- """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Valid project id and endpoint id test case.
-
- """
- self.put(self.default_request_url)
- self.delete('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.default_domain_project_id,
- 'endpoint_id': self.endpoint_id})
-
- def test_remove_endpoint_project_association_with_invalid_project(self):
- """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Invalid project id test case.
-
- """
- self.put(self.default_request_url)
- self.delete('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': uuid.uuid4().hex,
- 'endpoint_id': self.endpoint_id},
- expected_status=http_client.NOT_FOUND)
-
- def test_remove_endpoint_project_association_with_invalid_endpoint(self):
- """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
-
- Invalid endpoint id test case.
-
- """
- self.put(self.default_request_url)
- self.delete('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.default_domain_project_id,
- 'endpoint_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_endpoint_project_association_cleanup_when_project_deleted(self):
- self.put(self.default_request_url)
- association_url = ('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
- {'endpoint_id': self.endpoint_id})
- r = self.get(association_url)
- self.assertValidProjectListResponse(r, expected_length=1)
-
- self.delete('/projects/%(project_id)s' % {
- 'project_id': self.default_domain_project_id})
-
- r = self.get(association_url)
- self.assertValidProjectListResponse(r, expected_length=0)
-
- def test_endpoint_project_association_cleanup_when_endpoint_deleted(self):
- self.put(self.default_request_url)
- association_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
- 'project_id': self.default_domain_project_id}
- r = self.get(association_url)
- self.assertValidEndpointListResponse(r, expected_length=1)
-
- self.delete('/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id})
-
- r = self.get(association_url)
- self.assertValidEndpointListResponse(r, expected_length=0)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_create_endpoint_project_association_invalidates_cache(self):
- # NOTE(davechen): create another endpoint which will be added to
- # default project, this should be done at first since
- # `create_endpoint` will also invalidate cache.
- endpoint_id2 = uuid.uuid4().hex
- endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
- region_id=self.region_id,
- interface='public',
- id=endpoint_id2)
- self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
-
- # create endpoint project association.
- self.put(self.default_request_url)
-
- # should get back only one endpoint that was just created.
- user_id = uuid.uuid4().hex
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- # there is only one endpoints associated with the default project.
- self.assertEqual(1, len(catalog[0]['endpoints']))
- self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
-
- # add the second endpoint to default project, bypassing
- # catalog_api API manager.
- self.catalog_api.driver.add_endpoint_to_project(
- endpoint_id2,
- self.default_domain_project_id)
-
- # but, we can just get back one endpoint from the cache, since the
- # catalog is pulled out from cache and its haven't been invalidated.
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- self.assertEqual(1, len(catalog[0]['endpoints']))
-
- # remove the endpoint2 from the default project, and add it again via
- # catalog_api API manager.
- self.catalog_api.driver.remove_endpoint_from_project(
- endpoint_id2,
- self.default_domain_project_id)
-
- # add second endpoint to default project, this can be done by calling
- # the catalog_api API manager directly but call the REST API
- # instead for consistency.
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.default_domain_project_id,
- 'endpoint_id': endpoint_id2})
-
- # should get back two endpoints since the cache has been
- # invalidated when the second endpoint was added to default project.
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- self.assertEqual(2, len(catalog[0]['endpoints']))
-
- ep_id_list = [catalog[0]['endpoints'][0]['id'],
- catalog[0]['endpoints'][1]['id']]
- self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_remove_endpoint_from_project_invalidates_cache(self):
- endpoint_id2 = uuid.uuid4().hex
- endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
- region_id=self.region_id,
- interface='public',
- id=endpoint_id2)
- self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
- # create endpoint project association.
- self.put(self.default_request_url)
-
- # add second endpoint to default project.
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.default_domain_project_id,
- 'endpoint_id': endpoint_id2})
-
- # should get back only one endpoint that was just created.
- user_id = uuid.uuid4().hex
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- # there are two endpoints associated with the default project.
- ep_id_list = [catalog[0]['endpoints'][0]['id'],
- catalog[0]['endpoints'][1]['id']]
- self.assertEqual(2, len(catalog[0]['endpoints']))
- self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
-
- # remove the endpoint2 from the default project, bypassing
- # catalog_api API manager.
- self.catalog_api.driver.remove_endpoint_from_project(
- endpoint_id2,
- self.default_domain_project_id)
-
- # but, we can just still get back two endpoints from the cache,
- # since the catalog is pulled out from cache and its haven't
- # been invalidated.
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- self.assertEqual(2, len(catalog[0]['endpoints']))
-
- # add back the endpoint2 to the default project, and remove it by
- # catalog_api API manage.
- self.catalog_api.driver.add_endpoint_to_project(
- endpoint_id2,
- self.default_domain_project_id)
-
- # remove the endpoint2 from the default project, this can be done
- # by calling the catalog_api API manager directly but call
- # the REST API instead for consistency.
- self.delete('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.default_domain_project_id,
- 'endpoint_id': endpoint_id2})
-
- # should only get back one endpoint since the cache has been
- # invalidated after the endpoint project association was removed.
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- self.assertEqual(1, len(catalog[0]['endpoints']))
- self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
-
-
-class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase):
-
- def test_project_scoped_token_using_endpoint_filter(self):
- """Verify endpoints from project scoped token filtered."""
- # create a project to work with
- ref = unit.new_project_ref(domain_id=self.domain_id)
- r = self.post('/projects', body={'project': ref})
- project = self.assertValidProjectResponse(r, ref)
-
- # grant the user a role on the project
- self.put(
- '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
- 'user_id': self.user['id'],
- 'project_id': project['id'],
- 'role_id': self.role['id']})
-
- # set the user's preferred project
- body = {'user': {'default_project_id': project['id']}}
- r = self.patch('/users/%(user_id)s' % {
- 'user_id': self.user['id']},
- body=body)
- self.assertValidUserResponse(r)
-
- # add one endpoint to the project
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': project['id'],
- 'endpoint_id': self.endpoint_id})
-
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.post('/auth/tokens', body=auth_data)
- self.assertValidProjectScopedTokenResponse(
- r,
- require_catalog=True,
- endpoint_filter=True,
- ep_filter_assoc=1)
- self.assertEqual(project['id'], r.result['token']['project']['id'])
-
- def test_default_scoped_token_using_endpoint_filter(self):
- """Verify endpoints from default scoped token filtered."""
- # add one endpoint to default project
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id})
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.post('/auth/tokens', body=auth_data)
- self.assertValidProjectScopedTokenResponse(
- r,
- require_catalog=True,
- endpoint_filter=True,
- ep_filter_assoc=1)
- self.assertEqual(self.project['id'],
- r.result['token']['project']['id'])
-
- # Ensure name of the service exists
- self.assertIn('name', r.result['token']['catalog'][0])
-
- # region and region_id should be the same in endpoints
- endpoint = r.result['token']['catalog'][0]['endpoints'][0]
- self.assertIn('region', endpoint)
- self.assertIn('region_id', endpoint)
- self.assertEqual(endpoint['region'], endpoint['region_id'])
-
- def test_scoped_token_with_no_catalog_using_endpoint_filter(self):
- """Verify endpoint filter does not affect no catalog."""
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id})
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.post('/auth/tokens?nocatalog', body=auth_data)
- self.assertValidProjectScopedTokenResponse(
- r,
- require_catalog=False)
- self.assertEqual(self.project['id'],
- r.result['token']['project']['id'])
-
- def test_invalid_endpoint_project_association(self):
- """Verify an invalid endpoint-project association is handled."""
- # add first endpoint to default project
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id})
-
- # create a second temporary endpoint
- endpoint_id2 = uuid.uuid4().hex
- endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
- region_id=self.region_id,
- interface='public',
- id=endpoint_id2)
- self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
-
- # add second endpoint to default project
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': endpoint_id2})
-
- # remove the temporary reference
- # this will create inconsistency in the endpoint filter table
- # which is fixed during the catalog creation for token request
- self.catalog_api.delete_endpoint(endpoint_id2)
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.post('/auth/tokens', body=auth_data)
- self.assertValidProjectScopedTokenResponse(
- r,
- require_catalog=True,
- endpoint_filter=True,
- ep_filter_assoc=1)
- self.assertEqual(self.project['id'],
- r.result['token']['project']['id'])
-
- def test_disabled_endpoint(self):
- """Test that a disabled endpoint is handled."""
- # Add an enabled endpoint to the default project
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id})
-
- # Add a disabled endpoint to the default project.
-
- # Create a disabled endpoint that's like the enabled one.
- disabled_endpoint_ref = copy.copy(self.endpoint)
- disabled_endpoint_id = uuid.uuid4().hex
- disabled_endpoint_ref.update({
- 'id': disabled_endpoint_id,
- 'enabled': False,
- 'interface': 'internal'
- })
- self.catalog_api.create_endpoint(disabled_endpoint_id,
- disabled_endpoint_ref)
-
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': disabled_endpoint_id})
-
- # Authenticate to get token with catalog
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.post('/auth/tokens', body=auth_data)
-
- endpoints = r.result['token']['catalog'][0]['endpoints']
- endpoint_ids = [ep['id'] for ep in endpoints]
- self.assertEqual([self.endpoint_id], endpoint_ids)
-
- def test_multiple_endpoint_project_associations(self):
-
- def _create_an_endpoint():
- endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id)
- r = self.post('/endpoints', body={'endpoint': endpoint_ref})
- return r.result['endpoint']['id']
-
- # create three endpoints
- endpoint_id1 = _create_an_endpoint()
- endpoint_id2 = _create_an_endpoint()
- _create_an_endpoint()
-
- # only associate two endpoints with project
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': endpoint_id1})
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': endpoint_id2})
-
- # there should be only two endpoints in token catalog
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.post('/auth/tokens', body=auth_data)
- self.assertValidProjectScopedTokenResponse(
- r,
- require_catalog=True,
- endpoint_filter=True,
- ep_filter_assoc=2)
-
- def test_get_auth_catalog_using_endpoint_filter(self):
- # add one endpoint to default project
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id})
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- token_data = self.post('/auth/tokens', body=auth_data)
- self.assertValidProjectScopedTokenResponse(
- token_data,
- require_catalog=True,
- endpoint_filter=True,
- ep_filter_assoc=1)
-
- auth_catalog = self.get('/auth/catalog',
- token=token_data.headers['X-Subject-Token'])
- self.assertEqual(token_data.result['token']['catalog'],
- auth_catalog.result['catalog'])
-
-
-class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin):
- JSON_HOME_DATA = {
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
- '1.0/rel/endpoint_projects': {
- 'href-template': '/OS-EP-FILTER/endpoints/{endpoint_id}/projects',
- 'href-vars': {
- 'endpoint_id':
- 'http://docs.openstack.org/api/openstack-identity/3/param/'
- 'endpoint_id',
- },
- },
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
- '1.0/rel/endpoint_groups': {
- 'href': '/OS-EP-FILTER/endpoint_groups',
- },
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
- '1.0/rel/endpoint_group': {
- 'href-template': '/OS-EP-FILTER/endpoint_groups/'
- '{endpoint_group_id}',
- 'href-vars': {
- 'endpoint_group_id':
- 'http://docs.openstack.org/api/openstack-identity/3/'
- 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
- },
- },
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
- '1.0/rel/endpoint_group_to_project_association': {
- 'href-template': '/OS-EP-FILTER/endpoint_groups/'
- '{endpoint_group_id}/projects/{project_id}',
- 'href-vars': {
- 'project_id':
- 'http://docs.openstack.org/api/openstack-identity/3/param/'
- 'project_id',
- 'endpoint_group_id':
- 'http://docs.openstack.org/api/openstack-identity/3/'
- 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
- },
- },
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
- '1.0/rel/projects_associated_with_endpoint_group': {
- 'href-template': '/OS-EP-FILTER/endpoint_groups/'
- '{endpoint_group_id}/projects',
- 'href-vars': {
- 'endpoint_group_id':
- 'http://docs.openstack.org/api/openstack-identity/3/'
- 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
- },
- },
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
- '1.0/rel/endpoints_in_endpoint_group': {
- 'href-template': '/OS-EP-FILTER/endpoint_groups/'
- '{endpoint_group_id}/endpoints',
- 'href-vars': {
- 'endpoint_group_id':
- 'http://docs.openstack.org/api/openstack-identity/3/'
- 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id',
- },
- },
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
- '1.0/rel/project_endpoint_groups': {
- 'href-template': '/OS-EP-FILTER/projects/{project_id}/'
- 'endpoint_groups',
- 'href-vars': {
- 'project_id':
- 'http://docs.openstack.org/api/openstack-identity/3/param/'
- 'project_id',
- },
- },
- }
-
-
-class EndpointGroupCRUDTestCase(EndpointFilterTestCase):
-
- DEFAULT_ENDPOINT_GROUP_BODY = {
- 'endpoint_group': {
- 'description': 'endpoint group description',
- 'filters': {
- 'interface': 'admin'
- },
- 'name': 'endpoint_group_name'
- }
- }
-
- DEFAULT_ENDPOINT_GROUP_URL = '/OS-EP-FILTER/endpoint_groups'
-
- def test_create_endpoint_group(self):
- """POST /OS-EP-FILTER/endpoint_groups
-
- Valid endpoint group test case.
-
- """
- r = self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
- body=self.DEFAULT_ENDPOINT_GROUP_BODY)
- expected_filters = (self.DEFAULT_ENDPOINT_GROUP_BODY
- ['endpoint_group']['filters'])
- expected_name = (self.DEFAULT_ENDPOINT_GROUP_BODY
- ['endpoint_group']['name'])
- self.assertEqual(expected_filters,
- r.result['endpoint_group']['filters'])
- self.assertEqual(expected_name, r.result['endpoint_group']['name'])
- self.assertThat(
- r.result['endpoint_group']['links']['self'],
- matchers.EndsWith(
- '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': r.result['endpoint_group']['id']}))
-
- def test_create_invalid_endpoint_group(self):
- """POST /OS-EP-FILTER/endpoint_groups
-
- Invalid endpoint group creation test case.
-
- """
- invalid_body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
- invalid_body['endpoint_group']['filters'] = {'foobar': 'admin'}
- self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
- body=invalid_body,
- expected_status=http_client.BAD_REQUEST)
-
- def test_get_endpoint_group(self):
- """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
-
- Valid endpoint group test case.
-
- """
- # create an endpoint group to work with
- response = self.post(self.DEFAULT_ENDPOINT_GROUP_URL,
- body=self.DEFAULT_ENDPOINT_GROUP_BODY)
- endpoint_group_id = response.result['endpoint_group']['id']
- endpoint_group_filters = response.result['endpoint_group']['filters']
- endpoint_group_name = response.result['endpoint_group']['name']
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- self.get(url)
- self.assertEqual(endpoint_group_id,
- response.result['endpoint_group']['id'])
- self.assertEqual(endpoint_group_filters,
- response.result['endpoint_group']['filters'])
- self.assertEqual(endpoint_group_name,
- response.result['endpoint_group']['name'])
- self.assertThat(response.result['endpoint_group']['links']['self'],
- matchers.EndsWith(url))
-
- def test_get_invalid_endpoint_group(self):
- """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
-
- Invalid endpoint group test case.
-
- """
- endpoint_group_id = 'foobar'
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_check_endpoint_group(self):
- """HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}
-
- Valid endpoint_group_id test case.
-
- """
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- self.head(url, expected_status=http_client.OK)
-
- def test_check_invalid_endpoint_group(self):
- """HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}
-
- Invalid endpoint_group_id test case.
-
- """
- endpoint_group_id = 'foobar'
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- self.head(url, expected_status=http_client.NOT_FOUND)
-
- def test_patch_endpoint_group(self):
- """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}
-
- Valid endpoint group patch test case.
-
- """
- body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
- body['endpoint_group']['filters'] = {'region_id': 'UK'}
- body['endpoint_group']['name'] = 'patch_test'
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- r = self.patch(url, body=body)
- self.assertEqual(endpoint_group_id,
- r.result['endpoint_group']['id'])
- self.assertEqual(body['endpoint_group']['filters'],
- r.result['endpoint_group']['filters'])
- self.assertThat(r.result['endpoint_group']['links']['self'],
- matchers.EndsWith(url))
-
- def test_patch_nonexistent_endpoint_group(self):
- """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}
-
- Invalid endpoint group patch test case.
-
- """
- body = {
- 'endpoint_group': {
- 'name': 'patch_test'
- }
- }
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': 'ABC'}
- self.patch(url, body=body, expected_status=http_client.NOT_FOUND)
-
- def test_patch_invalid_endpoint_group(self):
- """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}
-
- Valid endpoint group patch test case.
-
- """
- body = {
- 'endpoint_group': {
- 'description': 'endpoint group description',
- 'filters': {
- 'region': 'UK'
- },
- 'name': 'patch_test'
- }
- }
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- self.patch(url, body=body, expected_status=http_client.BAD_REQUEST)
-
- # Perform a GET call to ensure that the content remains
- # the same (as DEFAULT_ENDPOINT_GROUP_BODY) after attempting to update
- # with an invalid filter
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- r = self.get(url)
- del r.result['endpoint_group']['id']
- del r.result['endpoint_group']['links']
- self.assertDictEqual(self.DEFAULT_ENDPOINT_GROUP_BODY, r.result)
-
- def test_delete_endpoint_group(self):
- """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
-
- Valid endpoint group test case.
-
- """
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- self.delete(url)
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_delete_invalid_endpoint_group(self):
- """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}
-
- Invalid endpoint group test case.
-
- """
- endpoint_group_id = 'foobar'
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- self.delete(url, expected_status=http_client.NOT_FOUND)
-
- def test_add_endpoint_group_to_project(self):
- """Create a valid endpoint group and project association."""
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
- self._create_endpoint_group_project_association(endpoint_group_id,
- self.project_id)
-
- def test_add_endpoint_group_to_project_with_invalid_project_id(self):
- """Create an invalid endpoint group and project association."""
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # associate endpoint group with project
- project_id = uuid.uuid4().hex
- url = self._get_project_endpoint_group_url(
- endpoint_group_id, project_id)
- self.put(url, expected_status=http_client.NOT_FOUND)
-
- def test_get_endpoint_group_in_project(self):
- """Test retrieving project endpoint group association."""
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # associate endpoint group with project
- url = self._get_project_endpoint_group_url(
- endpoint_group_id, self.project_id)
- self.put(url)
- response = self.get(url)
- self.assertEqual(
- endpoint_group_id,
- response.result['project_endpoint_group']['endpoint_group_id'])
- self.assertEqual(
- self.project_id,
- response.result['project_endpoint_group']['project_id'])
-
- def test_get_invalid_endpoint_group_in_project(self):
- """Test retrieving project endpoint group association."""
- endpoint_group_id = uuid.uuid4().hex
- project_id = uuid.uuid4().hex
- url = self._get_project_endpoint_group_url(
- endpoint_group_id, project_id)
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_list_endpoint_groups_in_project(self):
- """GET /OS-EP-FILTER/projects/{project_id}/endpoint_groups."""
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # associate endpoint group with project
- url = self._get_project_endpoint_group_url(
- endpoint_group_id, self.project_id)
- self.put(url)
-
- url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
- {'project_id': self.project_id})
- response = self.get(url)
-
- self.assertEqual(
- endpoint_group_id,
- response.result['endpoint_groups'][0]['id'])
-
- def test_list_endpoint_groups_in_invalid_project(self):
- """Test retrieving from invalid project."""
- project_id = uuid.uuid4().hex
- url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
- {'project_id': project_id})
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_empty_endpoint_groups_in_project(self):
- """Test when no endpoint groups associated with the project."""
- url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' %
- {'project_id': self.project_id})
- response = self.get(url)
-
- self.assertEqual(0, len(response.result['endpoint_groups']))
-
- def test_check_endpoint_group_to_project(self):
- """Test HEAD with a valid endpoint group and project association."""
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
- self._create_endpoint_group_project_association(endpoint_group_id,
- self.project_id)
- url = self._get_project_endpoint_group_url(
- endpoint_group_id, self.project_id)
- self.head(url, expected_status=http_client.OK)
-
- def test_check_endpoint_group_to_project_with_invalid_project_id(self):
- """Test HEAD with an invalid endpoint group and project association."""
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # create an endpoint group to project association
- url = self._get_project_endpoint_group_url(
- endpoint_group_id, self.project_id)
- self.put(url)
-
- # send a head request with an invalid project id
- project_id = uuid.uuid4().hex
- url = self._get_project_endpoint_group_url(
- endpoint_group_id, project_id)
- self.head(url, expected_status=http_client.NOT_FOUND)
-
- def test_list_endpoint_groups(self):
- """GET /OS-EP-FILTER/endpoint_groups."""
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # recover all endpoint groups
- url = '/OS-EP-FILTER/endpoint_groups'
- r = self.get(url)
- self.assertNotEmpty(r.result['endpoint_groups'])
- self.assertEqual(endpoint_group_id,
- r.result['endpoint_groups'][0].get('id'))
-
- def test_list_projects_associated_with_endpoint_group(self):
- """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}/projects
-
- Valid endpoint group test case.
-
- """
- # create an endpoint group to work with
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # associate endpoint group with project
- self._create_endpoint_group_project_association(endpoint_group_id,
- self.project_id)
-
- # recover list of projects associated with endpoint group
- url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
- '/projects' %
- {'endpoint_group_id': endpoint_group_id})
- self.get(url)
-
- def test_list_endpoints_associated_with_endpoint_group(self):
- """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}/endpoints
-
- Valid endpoint group test case.
-
- """
- # create a service
- service_ref = unit.new_service_ref()
- response = self.post(
- '/services',
- body={'service': service_ref})
-
- service_id = response.result['service']['id']
-
- # create an endpoint
- endpoint_ref = unit.new_endpoint_ref(service_id=service_id,
- interface='public',
- region_id=self.region_id)
- response = self.post('/endpoints', body={'endpoint': endpoint_ref})
- endpoint_id = response.result['endpoint']['id']
-
- # create an endpoint group
- body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
- body['endpoint_group']['filters'] = {'service_id': service_id}
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, body)
-
- # create association
- self._create_endpoint_group_project_association(endpoint_group_id,
- self.project_id)
-
- # recover list of endpoints associated with endpoint group
- url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
- '/endpoints' % {'endpoint_group_id': endpoint_group_id})
- r = self.get(url)
- self.assertNotEmpty(r.result['endpoints'])
- self.assertEqual(endpoint_id, r.result['endpoints'][0].get('id'))
-
- def test_list_endpoints_associated_with_project_endpoint_group(self):
- """GET /OS-EP-FILTER/projects/{project_id}/endpoints
-
- Valid project, endpoint id, and endpoint group test case.
-
- """
- # create a temporary service
- service_ref = unit.new_service_ref()
- response = self.post('/services', body={'service': service_ref})
- service_id2 = response.result['service']['id']
-
- # create additional endpoints
- self._create_endpoint_and_associations(
- self.default_domain_project_id, service_id2)
- self._create_endpoint_and_associations(
- self.default_domain_project_id)
-
- # create project and endpoint association with default endpoint:
- self.put(self.default_request_url)
-
- # create an endpoint group that contains a different endpoint
- body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY)
- body['endpoint_group']['filters'] = {'service_id': service_id2}
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, body)
-
- # associate endpoint group with project
- self._create_endpoint_group_project_association(
- endpoint_group_id, self.default_domain_project_id)
-
- # Now get a list of the filtered endpoints
- endpoints_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
- 'project_id': self.default_domain_project_id}
- r = self.get(endpoints_url)
- endpoints = self.assertValidEndpointListResponse(r)
- self.assertEqual(2, len(endpoints))
-
- # Ensure catalog includes the endpoints from endpoint_group project
- # association, this is needed when a project scoped token is issued
- # and "endpoint_filter.sql" backend driver is in place.
- user_id = uuid.uuid4().hex
- catalog_list = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
- self.assertEqual(2, len(catalog_list))
-
- # Now remove project endpoint group association
- url = self._get_project_endpoint_group_url(
- endpoint_group_id, self.default_domain_project_id)
- self.delete(url)
-
- # Now remove endpoint group
- url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
- 'endpoint_group_id': endpoint_group_id}
- self.delete(url)
-
- r = self.get(endpoints_url)
- endpoints = self.assertValidEndpointListResponse(r)
- self.assertEqual(1, len(endpoints))
-
- catalog_list = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
- self.assertEqual(1, len(catalog_list))
-
- def test_endpoint_group_project_cleanup_with_project(self):
- # create endpoint group
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # create new project and associate with endpoint_group
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- r = self.post('/projects', body={'project': project_ref})
- project = self.assertValidProjectResponse(r, project_ref)
- url = self._get_project_endpoint_group_url(endpoint_group_id,
- project['id'])
- self.put(url)
-
- # check that we can recover the project endpoint group association
- self.get(url)
-
- # Now delete the project and then try and retrieve the project
- # endpoint group association again
- self.delete('/projects/%(project_id)s' % {
- 'project_id': project['id']})
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_endpoint_group_project_cleanup_with_endpoint_group(self):
- # create endpoint group
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # create new project and associate with endpoint_group
- project_ref = unit.new_project_ref(domain_id=self.domain_id)
- r = self.post('/projects', body={'project': project_ref})
- project = self.assertValidProjectResponse(r, project_ref)
- url = self._get_project_endpoint_group_url(endpoint_group_id,
- project['id'])
- self.put(url)
-
- # check that we can recover the project endpoint group association
- self.get(url)
-
- # now remove the project endpoint group association
- self.delete(url)
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_removing_an_endpoint_group_project(self):
- # create an endpoint group
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # create an endpoint_group project
- url = self._get_project_endpoint_group_url(
- endpoint_group_id, self.default_domain_project_id)
- self.put(url)
-
- # remove the endpoint group project
- self.delete(url)
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_remove_endpoint_group_with_project_association(self):
- # create an endpoint group
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # create an endpoint_group project
- project_endpoint_group_url = self._get_project_endpoint_group_url(
- endpoint_group_id, self.default_domain_project_id)
- self.put(project_endpoint_group_url)
-
- # remove endpoint group, the associated endpoint_group project will
- # be removed as well.
- endpoint_group_url = ('/OS-EP-FILTER/endpoint_groups/'
- '%(endpoint_group_id)s'
- % {'endpoint_group_id': endpoint_group_id})
- self.delete(endpoint_group_url)
- self.get(endpoint_group_url, expected_status=http_client.NOT_FOUND)
- self.get(project_endpoint_group_url,
- expected_status=http_client.NOT_FOUND)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_add_endpoint_group_to_project_invalidates_catalog_cache(self):
- # create another endpoint with 'admin' interface which matches
- # 'filters' definition in endpoint group, then there should be two
- # endpoints returned when retrieving v3 catalog if cache works as
- # expected.
- # this should be done at first since `create_endpoint` will also
- # invalidate cache.
- endpoint_id2 = uuid.uuid4().hex
- endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
- region_id=self.region_id,
- interface='admin',
- id=endpoint_id2)
- self.catalog_api.create_endpoint(endpoint_id2, endpoint2)
-
- # create a project and endpoint association.
- self.put(self.default_request_url)
-
- # there is only one endpoint associated with the default project.
- user_id = uuid.uuid4().hex
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
-
- # create an endpoint group.
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # add the endpoint group to default project, bypassing
- # catalog_api API manager.
- self.catalog_api.driver.add_endpoint_group_to_project(
- endpoint_group_id,
- self.default_domain_project_id)
-
- # can get back only one endpoint from the cache, since the catalog
- # is pulled out from cache.
- invalid_catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- self.assertThat(invalid_catalog[0]['endpoints'],
- matchers.HasLength(1))
- self.assertEqual(catalog, invalid_catalog)
-
- # remove the endpoint group from default project, and add it again via
- # catalog_api API manager.
- self.catalog_api.driver.remove_endpoint_group_from_project(
- endpoint_group_id,
- self.default_domain_project_id)
-
- # add the endpoint group to default project.
- self.catalog_api.add_endpoint_group_to_project(
- endpoint_group_id,
- self.default_domain_project_id)
-
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- # now, it will return 2 endpoints since the cache has been
- # invalidated.
- self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
-
- ep_id_list = [catalog[0]['endpoints'][0]['id'],
- catalog[0]['endpoints'][1]['id']]
- self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_remove_endpoint_group_from_project_invalidates_cache(self):
- # create another endpoint with 'admin' interface which matches
- # 'filters' definition in endpoint group, then there should be two
- # endpoints returned when retrieving v3 catalog. But only one
- # endpoint will return after the endpoint group's deletion if cache
- # works as expected.
- # this should be done at first since `create_endpoint` will also
- # invalidate cache.
- endpoint_id2 = uuid.uuid4().hex
- endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
- region_id=self.region_id,
- interface='admin',
- id=endpoint_id2)
- self.catalog_api.create_endpoint(endpoint_id2, endpoint2)
-
- # create project and endpoint association.
- self.put(self.default_request_url)
-
- # create an endpoint group.
- endpoint_group_id = self._create_valid_endpoint_group(
- self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
-
- # add the endpoint group to default project.
- self.catalog_api.add_endpoint_group_to_project(
- endpoint_group_id,
- self.default_domain_project_id)
-
- # should get back two endpoints, one from endpoint project
- # association, the other one is from endpoint_group project
- # association.
- user_id = uuid.uuid4().hex
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
-
- ep_id_list = [catalog[0]['endpoints'][0]['id'],
- catalog[0]['endpoints'][1]['id']]
- self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
-
- # remove endpoint_group project association, bypassing
- # catalog_api API manager.
- self.catalog_api.driver.remove_endpoint_group_from_project(
- endpoint_group_id,
- self.default_domain_project_id)
-
- # still get back two endpoints, since the catalog is pulled out
- # from cache and the cache haven't been invalidated.
- invalid_catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- self.assertThat(invalid_catalog[0]['endpoints'],
- matchers.HasLength(2))
- self.assertEqual(catalog, invalid_catalog)
-
- # add back the endpoint_group project association and remove it from
- # manager.
- self.catalog_api.driver.add_endpoint_group_to_project(
- endpoint_group_id,
- self.default_domain_project_id)
-
- self.catalog_api.remove_endpoint_group_from_project(
- endpoint_group_id,
- self.default_domain_project_id)
-
- # should only get back one endpoint since the cache has been
- # invalidated after the endpoint_group project association was
- # removed.
- catalog = self.catalog_api.get_v3_catalog(
- user_id,
- self.default_domain_project_id)
-
- self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
- self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
-
- def _create_valid_endpoint_group(self, url, body):
- r = self.post(url, body=body)
- return r.result['endpoint_group']['id']
-
- def _create_endpoint_group_project_association(self,
- endpoint_group_id,
- project_id):
- url = self._get_project_endpoint_group_url(endpoint_group_id,
- project_id)
- self.put(url)
-
- def _get_project_endpoint_group_url(self,
- endpoint_group_id,
- project_id):
- return ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
- '/projects/%(project_id)s' %
- {'endpoint_group_id': endpoint_group_id,
- 'project_id': project_id})
-
- def _create_endpoint_and_associations(self, project_id, service_id=None):
- """Creates an endpoint associated with service and project."""
- if not service_id:
- # create a new service
- service_ref = unit.new_service_ref()
- response = self.post(
- '/services', body={'service': service_ref})
- service_id = response.result['service']['id']
-
- # create endpoint
- endpoint_ref = unit.new_endpoint_ref(service_id=service_id,
- interface='public',
- region_id=self.region_id)
- response = self.post('/endpoints', body={'endpoint': endpoint_ref})
- endpoint = response.result['endpoint']
-
- # now add endpoint to project
- self.put('/OS-EP-FILTER/projects/%(project_id)s'
- '/endpoints/%(endpoint_id)s' % {
- 'project_id': self.project['id'],
- 'endpoint_id': endpoint['id']})
- return endpoint
diff --git a/keystone-moon/keystone/tests/unit/test_auth.py b/keystone-moon/keystone/tests/unit/test_auth.py
deleted file mode 100644
index 6f44b316..00000000
--- a/keystone-moon/keystone/tests/unit/test_auth.py
+++ /dev/null
@@ -1,1446 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import datetime
-import random
-import string
-import uuid
-
-import mock
-from oslo_config import cfg
-import oslo_utils.fixture
-from oslo_utils import timeutils
-import six
-from testtools import matchers
-
-from keystone import assignment
-from keystone import auth
-from keystone.common import authorization
-from keystone.common import config
-from keystone import exception
-from keystone.models import token_model
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit import ksfixtures
-from keystone.tests.unit.ksfixtures import database
-from keystone import token
-from keystone.token import provider
-from keystone import trust
-
-
-CONF = cfg.CONF
-TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
-
-HOST = ''.join(random.choice(string.ascii_lowercase) for x in range(
- random.randint(5, 15)))
-HOST_URL = 'http://%s' % (HOST)
-
-
-def _build_user_auth(token=None, user_id=None, username=None,
- password=None, tenant_id=None, tenant_name=None,
- trust_id=None):
- """Build auth dictionary.
-
- It will create an auth dictionary based on all the arguments
- that it receives.
- """
- auth_json = {}
- if token is not None:
- auth_json['token'] = token
- if username or password:
- auth_json['passwordCredentials'] = {}
- if username is not None:
- auth_json['passwordCredentials']['username'] = username
- if user_id is not None:
- auth_json['passwordCredentials']['userId'] = user_id
- if password is not None:
- auth_json['passwordCredentials']['password'] = password
- if tenant_name is not None:
- auth_json['tenantName'] = tenant_name
- if tenant_id is not None:
- auth_json['tenantId'] = tenant_id
- if trust_id is not None:
- auth_json['trust_id'] = trust_id
- return auth_json
-
-
-class AuthTest(unit.TestCase):
- def setUp(self):
- self.useFixture(database.Database())
- super(AuthTest, self).setUp()
- self.time_fixture = self.useFixture(oslo_utils.fixture.TimeFixture())
-
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- self.context_with_remote_user = {'environment':
- {'REMOTE_USER': 'FOO',
- 'AUTH_TYPE': 'Negotiate'}}
- self.empty_context = {'environment': {}}
-
- self.controller = token.controllers.Auth()
-
- def assertEqualTokens(self, a, b, enforce_audit_ids=True):
- """Assert that two tokens are equal.
-
- Compare two tokens except for their ids. This also truncates
- the time in the comparison.
- """
- def normalize(token):
- token['access']['token']['id'] = 'dummy'
- del token['access']['token']['expires']
- del token['access']['token']['issued_at']
- del token['access']['token']['audit_ids']
- return token
-
- self.assertCloseEnoughForGovernmentWork(
- timeutils.parse_isotime(a['access']['token']['expires']),
- timeutils.parse_isotime(b['access']['token']['expires']))
- self.assertCloseEnoughForGovernmentWork(
- timeutils.parse_isotime(a['access']['token']['issued_at']),
- timeutils.parse_isotime(b['access']['token']['issued_at']))
- if enforce_audit_ids:
- self.assertIn(a['access']['token']['audit_ids'][0],
- b['access']['token']['audit_ids'])
- self.assertThat(len(a['access']['token']['audit_ids']),
- matchers.LessThan(3))
- self.assertThat(len(b['access']['token']['audit_ids']),
- matchers.LessThan(3))
-
- return self.assertDictEqual(normalize(a), normalize(b))
-
-
-class AuthBadRequests(AuthTest):
- def test_no_external_auth(self):
- """Verify that _authenticate_external() raises exception if N/A."""
- self.assertRaises(
- token.controllers.ExternalAuthNotApplicable,
- self.controller._authenticate_external,
- context={}, auth={})
-
- def test_empty_remote_user(self):
- """Verify exception is raised when REMOTE_USER is an empty string."""
- context = {'environment': {'REMOTE_USER': ''}}
- self.assertRaises(
- token.controllers.ExternalAuthNotApplicable,
- self.controller._authenticate_external,
- context=context, auth={})
-
- def test_no_token_in_auth(self):
- """Verify that _authenticate_token() raises exception if no token."""
- self.assertRaises(
- exception.ValidationError,
- self.controller._authenticate_token,
- None, {})
-
- def test_no_credentials_in_auth(self):
- """Verify that _authenticate_local() raises exception if no creds."""
- self.assertRaises(
- exception.ValidationError,
- self.controller._authenticate_local,
- None, {})
-
- def test_empty_username_and_userid_in_auth(self):
- """Verify that empty username and userID raises ValidationError."""
- self.assertRaises(
- exception.ValidationError,
- self.controller._authenticate_local,
- None, {'passwordCredentials': {'password': 'abc',
- 'userId': '', 'username': ''}})
-
- def test_authenticate_blank_request_body(self):
- """Verify sending empty json dict raises the right exception."""
- self.assertRaises(exception.ValidationError,
- self.controller.authenticate,
- {}, {})
-
- def test_authenticate_blank_auth(self):
- """Verify sending blank 'auth' raises the right exception."""
- body_dict = _build_user_auth()
- self.assertRaises(exception.ValidationError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_authenticate_invalid_auth_content(self):
- """Verify sending invalid 'auth' raises the right exception."""
- self.assertRaises(exception.ValidationError,
- self.controller.authenticate,
- {}, {'auth': 'abcd'})
-
- def test_authenticate_user_id_too_large(self):
- """Verify sending large 'userId' raises the right exception."""
- body_dict = _build_user_auth(user_id='0' * 65, username='FOO',
- password='foo2')
- self.assertRaises(exception.ValidationSizeError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_authenticate_username_too_large(self):
- """Verify sending large 'username' raises the right exception."""
- body_dict = _build_user_auth(username='0' * 65, password='foo2')
- self.assertRaises(exception.ValidationSizeError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_authenticate_tenant_id_too_large(self):
- """Verify sending large 'tenantId' raises the right exception."""
- body_dict = _build_user_auth(username='FOO', password='foo2',
- tenant_id='0' * 65)
- self.assertRaises(exception.ValidationSizeError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_authenticate_tenant_name_too_large(self):
- """Verify sending large 'tenantName' raises the right exception."""
- body_dict = _build_user_auth(username='FOO', password='foo2',
- tenant_name='0' * 65)
- self.assertRaises(exception.ValidationSizeError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_authenticate_token_too_large(self):
- """Verify sending large 'token' raises the right exception."""
- body_dict = _build_user_auth(token={'id': '0' * 8193})
- self.assertRaises(exception.ValidationSizeError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_authenticate_password_too_large(self):
- """Verify sending large 'password' raises the right exception."""
- length = CONF.identity.max_password_length + 1
- body_dict = _build_user_auth(username='FOO', password='0' * length)
- self.assertRaises(exception.ValidationSizeError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_authenticate_fails_if_project_unsafe(self):
- """Verify authenticate to a project with unsafe name fails."""
- # Start with url name restrictions off, so we can create the unsafe
- # named project
- self.config_fixture.config(group='resource',
- project_name_url_safe='off')
- unsafe_name = 'i am not / safe'
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id, name=unsafe_name)
- self.resource_api.create_project(project['id'], project)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], project['id'], self.role_member['id'])
- no_context = {}
-
- body_dict = _build_user_auth(
- username=self.user_foo['name'],
- password=self.user_foo['password'],
- tenant_name=project['name'])
-
- # Since name url restriction is off, we should be able to autenticate
- self.controller.authenticate(no_context, body_dict)
-
- # Set the name url restriction to strict and we should fail to
- # authenticate
- self.config_fixture.config(group='resource',
- project_name_url_safe='strict')
- self.assertRaises(exception.Unauthorized,
- self.controller.authenticate,
- no_context, body_dict)
-
-
-class AuthWithToken(AuthTest):
- def test_unscoped_token(self):
- """Verify getting an unscoped token with password creds."""
- body_dict = _build_user_auth(username='FOO',
- password='foo2')
- unscoped_token = self.controller.authenticate({}, body_dict)
- self.assertNotIn('tenant', unscoped_token['access']['token'])
-
- def test_auth_invalid_token(self):
- """Verify exception is raised if invalid token."""
- body_dict = _build_user_auth(token={"id": uuid.uuid4().hex})
- self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_auth_bad_formatted_token(self):
- """Verify exception is raised if invalid token."""
- body_dict = _build_user_auth(token={})
- self.assertRaises(
- exception.ValidationError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_auth_unscoped_token_no_project(self):
- """Verify getting an unscoped token with an unscoped token."""
- body_dict = _build_user_auth(
- username='FOO',
- password='foo2')
- unscoped_token = self.controller.authenticate({}, body_dict)
-
- body_dict = _build_user_auth(
- token=unscoped_token["access"]["token"])
- unscoped_token_2 = self.controller.authenticate({}, body_dict)
-
- self.assertEqualTokens(unscoped_token, unscoped_token_2)
-
- def test_auth_unscoped_token_project(self):
- """Verify getting a token in a tenant with an unscoped token."""
- # Add a role in so we can check we get this back
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'],
- self.tenant_bar['id'],
- self.role_member['id'])
- # Get an unscoped token
- body_dict = _build_user_auth(
- username='FOO',
- password='foo2')
- unscoped_token = self.controller.authenticate({}, body_dict)
- # Get a token on BAR tenant using the unscoped token
- body_dict = _build_user_auth(
- token=unscoped_token["access"]["token"],
- tenant_name="BAR")
- scoped_token = self.controller.authenticate({}, body_dict)
-
- tenant = scoped_token["access"]["token"]["tenant"]
- roles = scoped_token["access"]["metadata"]["roles"]
- self.assertEqual(self.tenant_bar['id'], tenant["id"])
- self.assertThat(roles, matchers.Contains(self.role_member['id']))
-
- def test_auth_scoped_token_bad_project_with_debug(self):
- """Authenticating with an invalid project fails."""
- # Bug 1379952 reports poor user feedback, even in insecure_debug mode,
- # when the user accidentally passes a project name as an ID.
- # This test intentionally does exactly that.
- body_dict = _build_user_auth(
- username=self.user_foo['name'],
- password=self.user_foo['password'],
- tenant_id=self.tenant_bar['name'])
-
- # with insecure_debug enabled, this produces a friendly exception.
- self.config_fixture.config(debug=True, insecure_debug=True)
- e = self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate,
- {}, body_dict)
- # explicitly verify that the error message shows that a *name* is
- # found where an *ID* is expected
- self.assertIn(
- 'Project ID not found: %s' % self.tenant_bar['name'],
- six.text_type(e))
-
- def test_auth_scoped_token_bad_project_without_debug(self):
- """Authenticating with an invalid project fails."""
- # Bug 1379952 reports poor user feedback, even in insecure_debug mode,
- # when the user accidentally passes a project name as an ID.
- # This test intentionally does exactly that.
- body_dict = _build_user_auth(
- username=self.user_foo['name'],
- password=self.user_foo['password'],
- tenant_id=self.tenant_bar['name'])
-
- # with insecure_debug disabled (the default), authentication failure
- # details are suppressed.
- e = self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate,
- {}, body_dict)
- # explicitly verify that the error message details above have been
- # suppressed.
- self.assertNotIn(
- 'Project ID not found: %s' % self.tenant_bar['name'],
- six.text_type(e))
-
- def test_auth_token_project_group_role(self):
- """Verify getting a token in a tenant with group roles."""
- # Add a v2 style role in so we can check we get this back
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'],
- self.tenant_bar['id'],
- self.role_member['id'])
- # Now create a group role for this user as well
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- new_group = unit.new_group_ref(domain_id=domain1['id'])
- new_group = self.identity_api.create_group(new_group)
- self.identity_api.add_user_to_group(self.user_foo['id'],
- new_group['id'])
- self.assignment_api.create_grant(
- group_id=new_group['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_admin['id'])
-
- # Get a scoped token for the tenant
- body_dict = _build_user_auth(
- username='FOO',
- password='foo2',
- tenant_name="BAR")
-
- scoped_token = self.controller.authenticate({}, body_dict)
-
- tenant = scoped_token["access"]["token"]["tenant"]
- roles = scoped_token["access"]["metadata"]["roles"]
- self.assertEqual(self.tenant_bar['id'], tenant["id"])
- self.assertIn(self.role_member['id'], roles)
- self.assertIn(self.role_admin['id'], roles)
-
- def test_belongs_to_no_tenant(self):
- r = self.controller.authenticate(
- {},
- auth={
- 'passwordCredentials': {
- 'username': self.user_foo['name'],
- 'password': self.user_foo['password']
- }
- })
- unscoped_token_id = r['access']['token']['id']
- self.assertRaises(
- exception.Unauthorized,
- self.controller.validate_token,
- dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
- token_id=unscoped_token_id)
-
- def test_belongs_to(self):
- body_dict = _build_user_auth(
- username='FOO',
- password='foo2',
- tenant_name="BAR")
-
- scoped_token = self.controller.authenticate({}, body_dict)
- scoped_token_id = scoped_token['access']['token']['id']
-
- self.assertRaises(
- exception.Unauthorized,
- self.controller.validate_token,
- dict(is_admin=True, query_string={'belongsTo': 'me'}),
- token_id=scoped_token_id)
-
- self.assertRaises(
- exception.Unauthorized,
- self.controller.validate_token,
- dict(is_admin=True, query_string={'belongsTo': 'BAR'}),
- token_id=scoped_token_id)
-
- def test_token_auth_with_binding(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
- body_dict = _build_user_auth()
- unscoped_token = self.controller.authenticate(
- self.context_with_remote_user, body_dict)
-
- # the token should have bind information in it
- bind = unscoped_token['access']['token']['bind']
- self.assertEqual('FOO', bind['kerberos'])
-
- body_dict = _build_user_auth(
- token=unscoped_token['access']['token'],
- tenant_name='BAR')
-
- # using unscoped token without remote user context fails
- self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate,
- self.empty_context, body_dict)
-
- # using token with remote user context succeeds
- scoped_token = self.controller.authenticate(
- self.context_with_remote_user, body_dict)
-
- # the bind information should be carried over from the original token
- bind = scoped_token['access']['token']['bind']
- self.assertEqual('FOO', bind['kerberos'])
-
- def test_deleting_role_revokes_token(self):
- role_controller = assignment.controllers.Role()
- project1 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project1['id'], project1)
- role_one = unit.new_role_ref(id='role_one')
- self.role_api.create_role(role_one['id'], role_one)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], project1['id'], role_one['id'])
- no_context = {}
-
- # Get a scoped token for the tenant
- body_dict = _build_user_auth(
- username=self.user_foo['name'],
- password=self.user_foo['password'],
- tenant_name=project1['name'])
- token = self.controller.authenticate(no_context, body_dict)
- # Ensure it is valid
- token_id = token['access']['token']['id']
- self.controller.validate_token(
- dict(is_admin=True, query_string={}),
- token_id=token_id)
-
- # Delete the role, which should invalidate the token
- role_controller.delete_role(
- dict(is_admin=True, query_string={}), role_one['id'])
-
- # Check the token is now invalid
- self.assertRaises(
- exception.TokenNotFound,
- self.controller.validate_token,
- dict(is_admin=True, query_string={}),
- token_id=token_id)
-
- def test_deleting_role_assignment_does_not_revoke_unscoped_token(self):
- no_context = {}
- admin_context = dict(is_admin=True, query_string={})
-
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project['id'], project)
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], project['id'], role['id'])
-
- # Get an unscoped token.
- token = self.controller.authenticate(no_context, _build_user_auth(
- username=self.user_foo['name'],
- password=self.user_foo['password']))
- token_id = token['access']['token']['id']
-
- # Ensure it is valid
- self.controller.validate_token(admin_context, token_id=token_id)
-
- # Delete the role assignment, which should not invalidate the token,
- # because we're not consuming it with just an unscoped token.
- self.assignment_api.remove_role_from_user_and_project(
- self.user_foo['id'], project['id'], role['id'])
-
- # Ensure it is still valid
- self.controller.validate_token(admin_context, token_id=token_id)
-
- def test_only_original_audit_id_is_kept(self):
- context = {}
-
- def get_audit_ids(token):
- return token['access']['token']['audit_ids']
-
- # get a token
- body_dict = _build_user_auth(username='FOO', password='foo2')
- unscoped_token = self.controller.authenticate(context, body_dict)
- starting_audit_id = get_audit_ids(unscoped_token)[0]
- self.assertIsNotNone(starting_audit_id)
-
- # get another token to ensure the correct parent audit_id is set
- body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
- unscoped_token_2 = self.controller.authenticate(context, body_dict)
- audit_ids = get_audit_ids(unscoped_token_2)
- self.assertThat(audit_ids, matchers.HasLength(2))
- self.assertThat(audit_ids[-1], matchers.Equals(starting_audit_id))
-
- # get another token from token 2 and ensure the correct parent
- # audit_id is set
- body_dict = _build_user_auth(token=unscoped_token_2["access"]["token"])
- unscoped_token_3 = self.controller.authenticate(context, body_dict)
- audit_ids = get_audit_ids(unscoped_token_3)
- self.assertThat(audit_ids, matchers.HasLength(2))
- self.assertThat(audit_ids[-1], matchers.Equals(starting_audit_id))
-
- def test_revoke_by_audit_chain_id_original_token(self):
- self.config_fixture.config(group='token', revoke_by_id=False)
- context = {}
-
- # get a token
- body_dict = _build_user_auth(username='FOO', password='foo2')
- unscoped_token = self.controller.authenticate(context, body_dict)
- token_id = unscoped_token['access']['token']['id']
- self.time_fixture.advance_time_seconds(1)
-
- # get a second token
- body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
- unscoped_token_2 = self.controller.authenticate(context, body_dict)
- token_2_id = unscoped_token_2['access']['token']['id']
- self.time_fixture.advance_time_seconds(1)
-
- self.token_provider_api.revoke_token(token_id, revoke_chain=True)
-
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- token_id=token_id)
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- token_id=token_2_id)
-
- def test_revoke_by_audit_chain_id_chained_token(self):
- self.config_fixture.config(group='token', revoke_by_id=False)
- context = {}
-
- # get a token
- body_dict = _build_user_auth(username='FOO', password='foo2')
- unscoped_token = self.controller.authenticate(context, body_dict)
- token_id = unscoped_token['access']['token']['id']
- self.time_fixture.advance_time_seconds(1)
-
- # get a second token
- body_dict = _build_user_auth(token=unscoped_token["access"]["token"])
- unscoped_token_2 = self.controller.authenticate(context, body_dict)
- token_2_id = unscoped_token_2['access']['token']['id']
- self.time_fixture.advance_time_seconds(1)
-
- self.token_provider_api.revoke_token(token_2_id, revoke_chain=True)
-
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- token_id=token_id)
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- token_id=token_2_id)
-
- def _mock_audit_info(self, parent_audit_id):
- # NOTE(morgainfainberg): The token model and other cases that are
- # extracting the audit id expect 'None' if the audit id doesn't
- # exist. This ensures that the audit_id is None and the
- # audit_chain_id will also return None.
- return [None, None]
-
- def test_revoke_with_no_audit_info(self):
- self.config_fixture.config(group='token', revoke_by_id=False)
- context = {}
-
- with mock.patch.object(provider, 'audit_info', self._mock_audit_info):
- # get a token
- body_dict = _build_user_auth(username='FOO', password='foo2')
- unscoped_token = self.controller.authenticate(context, body_dict)
- token_id = unscoped_token['access']['token']['id']
- self.time_fixture.advance_time_seconds(1)
-
- # get a second token
- body_dict = _build_user_auth(
- token=unscoped_token['access']['token'])
- unscoped_token_2 = self.controller.authenticate(context, body_dict)
- token_2_id = unscoped_token_2['access']['token']['id']
- self.time_fixture.advance_time_seconds(1)
-
- self.token_provider_api.revoke_token(token_id, revoke_chain=True)
- self.time_fixture.advance_time_seconds(1)
-
- revoke_events = self.revoke_api.list_events()
- self.assertThat(revoke_events, matchers.HasLength(1))
- revoke_event = revoke_events[0].to_dict()
- self.assertIn('expires_at', revoke_event)
- self.assertEqual(unscoped_token_2['access']['token']['expires'],
- revoke_event['expires_at'])
-
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- token_id=token_id)
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- token_id=token_2_id)
-
- # get a new token, with no audit info
- body_dict = _build_user_auth(username='FOO', password='foo2')
- unscoped_token = self.controller.authenticate(context, body_dict)
- token_id = unscoped_token['access']['token']['id']
- self.time_fixture.advance_time_seconds(1)
- # get a second token
- body_dict = _build_user_auth(
- token=unscoped_token['access']['token'])
- unscoped_token_2 = self.controller.authenticate(context, body_dict)
- token_2_id = unscoped_token_2['access']['token']['id']
- self.time_fixture.advance_time_seconds(1)
-
- # Revoke by audit_id, no audit_info means both parent and child
- # token are revoked.
- self.token_provider_api.revoke_token(token_id)
- self.time_fixture.advance_time_seconds(1)
-
- revoke_events = self.revoke_api.list_events()
- self.assertThat(revoke_events, matchers.HasLength(2))
- revoke_event = revoke_events[1].to_dict()
- self.assertIn('expires_at', revoke_event)
- self.assertEqual(unscoped_token_2['access']['token']['expires'],
- revoke_event['expires_at'])
-
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- token_id=token_id)
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- token_id=token_2_id)
-
-
-class FernetAuthWithToken(AuthWithToken):
- def config_overrides(self):
- super(FernetAuthWithToken, self).config_overrides()
- self.config_fixture.config(group='token', provider='fernet')
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
- def test_token_auth_with_binding(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
- body_dict = _build_user_auth()
- self.assertRaises(exception.NotImplemented,
- self.controller.authenticate,
- self.context_with_remote_user,
- body_dict)
-
- def test_revoke_with_no_audit_info(self):
- self.skipTest('Fernet with v2.0 and revocation is broken')
-
- def test_deleting_role_revokes_token(self):
- self.skipTest('Fernet with v2.0 and revocation is broken')
-
-
-class AuthWithPasswordCredentials(AuthTest):
- def test_auth_invalid_user(self):
- """Verify exception is raised if invalid user."""
- body_dict = _build_user_auth(
- username=uuid.uuid4().hex,
- password=uuid.uuid4().hex)
- self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_auth_valid_user_invalid_password(self):
- """Verify exception is raised if invalid password."""
- body_dict = _build_user_auth(
- username="FOO",
- password=uuid.uuid4().hex)
- self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_auth_empty_password(self):
- """Verify exception is raised if empty password."""
- body_dict = _build_user_auth(
- username="FOO",
- password="")
- self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_auth_no_password(self):
- """Verify exception is raised if empty password."""
- body_dict = _build_user_auth(username="FOO")
- self.assertRaises(
- exception.ValidationError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_authenticate_blank_password_credentials(self):
- """Sending empty dict as passwordCredentials raises 400 Bad Requset."""
- body_dict = {'passwordCredentials': {}, 'tenantName': 'demo'}
- self.assertRaises(exception.ValidationError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_authenticate_no_username(self):
- """Verify skipping username raises the right exception."""
- body_dict = _build_user_auth(password="pass",
- tenant_name="demo")
- self.assertRaises(exception.ValidationError,
- self.controller.authenticate,
- {}, body_dict)
-
- def test_bind_without_remote_user(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
- body_dict = _build_user_auth(username='FOO', password='foo2',
- tenant_name='BAR')
- token = self.controller.authenticate({}, body_dict)
- self.assertNotIn('bind', token['access']['token'])
-
- def test_change_default_domain_id(self):
- # If the default_domain_id config option is not the default then the
- # user in auth data is from the new default domain.
-
- # 1) Create a new domain.
- new_domain = unit.new_domain_ref()
- new_domain_id = new_domain['id']
-
- self.resource_api.create_domain(new_domain_id, new_domain)
-
- # 2) Create user "foo" in new domain with different password than
- # default-domain foo.
- new_user = unit.create_user(self.identity_api,
- name=self.user_foo['name'],
- domain_id=new_domain_id)
-
- # 3) Update the default_domain_id config option to the new domain
-
- self.config_fixture.config(group='identity',
- default_domain_id=new_domain_id)
-
- # 4) Authenticate as "foo" using the password in the new domain.
-
- body_dict = _build_user_auth(
- username=self.user_foo['name'],
- password=new_user['password'])
-
- # The test is successful if this doesn't raise, so no need to assert.
- self.controller.authenticate({}, body_dict)
-
-
-class AuthWithRemoteUser(AuthTest):
- def test_unscoped_remote_authn(self):
- """Verify getting an unscoped token with external authn."""
- body_dict = _build_user_auth(
- username='FOO',
- password='foo2')
- local_token = self.controller.authenticate(
- {}, body_dict)
-
- body_dict = _build_user_auth()
- remote_token = self.controller.authenticate(
- self.context_with_remote_user, body_dict)
-
- self.assertEqualTokens(local_token, remote_token,
- enforce_audit_ids=False)
-
- def test_unscoped_remote_authn_jsonless(self):
- """Verify that external auth with invalid request fails."""
- self.assertRaises(
- exception.ValidationError,
- self.controller.authenticate,
- {'REMOTE_USER': 'FOO'},
- None)
-
- def test_scoped_remote_authn(self):
- """Verify getting a token with external authn."""
- body_dict = _build_user_auth(
- username='FOO',
- password='foo2',
- tenant_name='BAR')
- local_token = self.controller.authenticate(
- {}, body_dict)
-
- body_dict = _build_user_auth(
- tenant_name='BAR')
- remote_token = self.controller.authenticate(
- self.context_with_remote_user, body_dict)
-
- self.assertEqualTokens(local_token, remote_token,
- enforce_audit_ids=False)
-
- def test_scoped_nometa_remote_authn(self):
- """Verify getting a token with external authn and no metadata."""
- body_dict = _build_user_auth(
- username='TWO',
- password='two2',
- tenant_name='BAZ')
- local_token = self.controller.authenticate(
- {}, body_dict)
-
- body_dict = _build_user_auth(tenant_name='BAZ')
- remote_token = self.controller.authenticate(
- {'environment': {'REMOTE_USER': 'TWO'}}, body_dict)
-
- self.assertEqualTokens(local_token, remote_token,
- enforce_audit_ids=False)
-
- def test_scoped_remote_authn_invalid_user(self):
- """Verify that external auth with invalid user fails."""
- body_dict = _build_user_auth(tenant_name="BAR")
- self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate,
- {'environment': {'REMOTE_USER': uuid.uuid4().hex}},
- body_dict)
-
- def test_bind_with_kerberos(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
- body_dict = _build_user_auth(tenant_name="BAR")
- token = self.controller.authenticate(self.context_with_remote_user,
- body_dict)
- self.assertEqual('FOO', token['access']['token']['bind']['kerberos'])
-
- def test_bind_without_config_opt(self):
- self.config_fixture.config(group='token', bind=['x509'])
- body_dict = _build_user_auth(tenant_name='BAR')
- token = self.controller.authenticate(self.context_with_remote_user,
- body_dict)
- self.assertNotIn('bind', token['access']['token'])
-
-
-class AuthWithTrust(AuthTest):
- def setUp(self):
- super(AuthWithTrust, self).setUp()
-
- self.trust_controller = trust.controllers.TrustV3()
- self.auth_v3_controller = auth.controllers.Auth()
- self.trustor = self.user_foo
- self.trustee = self.user_two
- self.assigned_roles = [self.role_member['id'],
- self.role_browser['id']]
- for assigned_role in self.assigned_roles:
- self.assignment_api.add_role_to_user_and_project(
- self.trustor['id'], self.tenant_bar['id'], assigned_role)
-
- self.sample_data = {'trustor_user_id': self.trustor['id'],
- 'trustee_user_id': self.trustee['id'],
- 'project_id': self.tenant_bar['id'],
- 'impersonation': True,
- 'roles': [{'id': self.role_browser['id']},
- {'name': self.role_member['name']}]}
-
- def config_overrides(self):
- super(AuthWithTrust, self).config_overrides()
- self.config_fixture.config(group='trust', enabled=True)
-
- def _create_auth_context(self, token_id):
- token_ref = token_model.KeystoneToken(
- token_id=token_id,
- token_data=self.token_provider_api.validate_token(token_id))
- auth_context = authorization.token_to_auth_context(token_ref)
- # NOTE(gyee): if public_endpoint and admin_endpoint are not set, which
- # is the default, the base url will be constructed from the environment
- # variables wsgi.url_scheme, SERVER_NAME, SERVER_PORT, and SCRIPT_NAME.
- # We have to set them in the context so the base url can be constructed
- # accordingly.
- return {'environment': {authorization.AUTH_CONTEXT_ENV: auth_context,
- 'wsgi.url_scheme': 'http',
- 'SCRIPT_NAME': '/v3',
- 'SERVER_PORT': '80',
- 'SERVER_NAME': HOST},
- 'token_id': token_id,
- 'host_url': HOST_URL}
-
- def create_trust(self, trust_data, trustor_name, expires_at=None,
- impersonation=True):
- username = trustor_name
- password = 'foo2'
- unscoped_token = self.get_unscoped_token(username, password)
- context = self._create_auth_context(
- unscoped_token['access']['token']['id'])
- trust_data_copy = copy.deepcopy(trust_data)
- trust_data_copy['expires_at'] = expires_at
- trust_data_copy['impersonation'] = impersonation
-
- return self.trust_controller.create_trust(
- context, trust=trust_data_copy)['trust']
-
- def get_unscoped_token(self, username, password='foo2'):
- body_dict = _build_user_auth(username=username, password=password)
- return self.controller.authenticate({}, body_dict)
-
- def build_v2_token_request(self, username, password, trust,
- tenant_id=None):
- if not tenant_id:
- tenant_id = self.tenant_bar['id']
- unscoped_token = self.get_unscoped_token(username, password)
- unscoped_token_id = unscoped_token['access']['token']['id']
- request_body = _build_user_auth(token={'id': unscoped_token_id},
- trust_id=trust['id'],
- tenant_id=tenant_id)
- return request_body
-
- def test_create_trust_bad_data_fails(self):
- unscoped_token = self.get_unscoped_token(self.trustor['name'])
- context = self._create_auth_context(
- unscoped_token['access']['token']['id'])
- bad_sample_data = {'trustor_user_id': self.trustor['id'],
- 'project_id': self.tenant_bar['id'],
- 'roles': [{'id': self.role_browser['id']}]}
-
- self.assertRaises(exception.ValidationError,
- self.trust_controller.create_trust,
- context, trust=bad_sample_data)
-
- def test_create_trust_no_roles(self):
- unscoped_token = self.get_unscoped_token(self.trustor['name'])
- context = {'token_id': unscoped_token['access']['token']['id']}
- self.sample_data['roles'] = []
- self.assertRaises(exception.Forbidden,
- self.trust_controller.create_trust,
- context, trust=self.sample_data)
-
- def test_create_trust(self):
- expires_at = (timeutils.utcnow() +
- datetime.timedelta(minutes=10)).strftime(TIME_FORMAT)
- new_trust = self.create_trust(self.sample_data, self.trustor['name'],
- expires_at=expires_at)
- self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
- self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
- role_ids = [self.role_browser['id'], self.role_member['id']]
- self.assertTrue(timeutils.parse_strtime(new_trust['expires_at'],
- fmt=TIME_FORMAT))
- self.assertIn('%s/v3/OS-TRUST/' % HOST_URL,
- new_trust['links']['self'])
- self.assertIn('%s/v3/OS-TRUST/' % HOST_URL,
- new_trust['roles_links']['self'])
-
- for role in new_trust['roles']:
- self.assertIn(role['id'], role_ids)
-
- def test_create_trust_expires_bad(self):
- self.assertRaises(exception.ValidationTimeStampError,
- self.create_trust, self.sample_data,
- self.trustor['name'], expires_at="bad")
- self.assertRaises(exception.ValidationTimeStampError,
- self.create_trust, self.sample_data,
- self.trustor['name'], expires_at="")
- self.assertRaises(exception.ValidationTimeStampError,
- self.create_trust, self.sample_data,
- self.trustor['name'], expires_at="Z")
-
- def test_create_trust_expires_older_than_now(self):
- self.assertRaises(exception.ValidationExpirationError,
- self.create_trust, self.sample_data,
- self.trustor['name'],
- expires_at="2010-06-04T08:44:31.999999Z")
-
- def test_create_trust_without_project_id(self):
- """Verify that trust can be created without project id.
-
- Also, token can be generated with that trust.
- """
- unscoped_token = self.get_unscoped_token(self.trustor['name'])
- context = self._create_auth_context(
- unscoped_token['access']['token']['id'])
- self.sample_data['project_id'] = None
- self.sample_data['roles'] = []
- new_trust = self.trust_controller.create_trust(
- context, trust=self.sample_data)['trust']
- self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
- self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
- self.assertIs(new_trust['impersonation'], True)
- auth_response = self.fetch_v2_token_from_trust(new_trust)
- token_user = auth_response['access']['user']
- self.assertEqual(token_user['id'], new_trust['trustor_user_id'])
-
- def test_get_trust(self):
- unscoped_token = self.get_unscoped_token(self.trustor['name'])
- context = self._create_auth_context(
- unscoped_token['access']['token']['id'])
- new_trust = self.trust_controller.create_trust(
- context, trust=self.sample_data)['trust']
- trust = self.trust_controller.get_trust(context,
- new_trust['id'])['trust']
- self.assertEqual(self.trustor['id'], trust['trustor_user_id'])
- self.assertEqual(self.trustee['id'], trust['trustee_user_id'])
- role_ids = [self.role_browser['id'], self.role_member['id']]
- for role in new_trust['roles']:
- self.assertIn(role['id'], role_ids)
-
- def test_get_trust_without_auth_context(self):
- """Verify a trust cannot be retrieved if auth context is missing."""
- unscoped_token = self.get_unscoped_token(self.trustor['name'])
- context = self._create_auth_context(
- unscoped_token['access']['token']['id'])
- new_trust = self.trust_controller.create_trust(
- context, trust=self.sample_data)['trust']
- # Delete the auth context before calling get_trust().
- del context['environment'][authorization.AUTH_CONTEXT_ENV]
- self.assertRaises(exception.Forbidden,
- self.trust_controller.get_trust, context,
- new_trust['id'])
-
- def test_create_trust_no_impersonation(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'],
- expires_at=None, impersonation=False)
- self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
- self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
- self.assertIs(new_trust['impersonation'], False)
- auth_response = self.fetch_v2_token_from_trust(new_trust)
- token_user = auth_response['access']['user']
- self.assertEqual(token_user['id'], new_trust['trustee_user_id'])
-
- def test_create_trust_impersonation(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
- self.assertEqual(self.trustee['id'], new_trust['trustee_user_id'])
- self.assertIs(new_trust['impersonation'], True)
- auth_response = self.fetch_v2_token_from_trust(new_trust)
- token_user = auth_response['access']['user']
- self.assertEqual(token_user['id'], new_trust['trustor_user_id'])
-
- def test_token_from_trust_wrong_user_fails(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- request_body = self.build_v2_token_request('FOO', 'foo2', new_trust)
- self.assertRaises(exception.Forbidden, self.controller.authenticate,
- {}, request_body)
-
- def test_token_from_trust_wrong_project_fails(self):
- for assigned_role in self.assigned_roles:
- self.assignment_api.add_role_to_user_and_project(
- self.trustor['id'], self.tenant_baz['id'], assigned_role)
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- request_body = self.build_v2_token_request('TWO', 'two2', new_trust,
- self.tenant_baz['id'])
- self.assertRaises(exception.Forbidden, self.controller.authenticate,
- {}, request_body)
-
- def fetch_v2_token_from_trust(self, trust):
- request_body = self.build_v2_token_request('TWO', 'two2', trust)
- auth_response = self.controller.authenticate({}, request_body)
- return auth_response
-
- def fetch_v3_token_from_trust(self, trust, trustee):
- v3_password_data = {
- 'identity': {
- "methods": ["password"],
- "password": {
- "user": {
- "id": trustee["id"],
- "password": trustee["password"]
- }
- }
- },
- 'scope': {
- 'project': {
- 'id': self.tenant_baz['id']
- }
- }
- }
- auth_response = (self.auth_v3_controller.authenticate_for_token
- ({'environment': {},
- 'query_string': {}},
- v3_password_data))
- token = auth_response.headers['X-Subject-Token']
-
- v3_req_with_trust = {
- "identity": {
- "methods": ["token"],
- "token": {"id": token}},
- "scope": {
- "OS-TRUST:trust": {"id": trust['id']}}}
- token_auth_response = (self.auth_v3_controller.authenticate_for_token
- ({'environment': {},
- 'query_string': {}},
- v3_req_with_trust))
- return token_auth_response
-
- def test_create_v3_token_from_trust(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- auth_response = self.fetch_v3_token_from_trust(new_trust, self.trustee)
-
- trust_token_user = auth_response.json['token']['user']
- self.assertEqual(self.trustor['id'], trust_token_user['id'])
-
- trust_token_trust = auth_response.json['token']['OS-TRUST:trust']
- self.assertEqual(trust_token_trust['id'], new_trust['id'])
- self.assertEqual(self.trustor['id'],
- trust_token_trust['trustor_user']['id'])
- self.assertEqual(self.trustee['id'],
- trust_token_trust['trustee_user']['id'])
-
- trust_token_roles = auth_response.json['token']['roles']
- self.assertEqual(2, len(trust_token_roles))
-
- def test_v3_trust_token_get_token_fails(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- auth_response = self.fetch_v3_token_from_trust(new_trust, self.trustee)
- trust_token = auth_response.headers['X-Subject-Token']
- v3_token_data = {'identity': {
- 'methods': ['token'],
- 'token': {'id': trust_token}
- }}
- self.assertRaises(
- exception.Forbidden,
- self.auth_v3_controller.authenticate_for_token,
- {'environment': {},
- 'query_string': {}}, v3_token_data)
-
- def test_token_from_trust(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- auth_response = self.fetch_v2_token_from_trust(new_trust)
-
- self.assertIsNotNone(auth_response)
- self.assertEqual(2,
- len(auth_response['access']['metadata']['roles']),
- "user_foo has three roles, but the token should"
- " only get the two roles specified in the trust.")
-
- def assert_token_count_for_trust(self, trust, expected_value):
- tokens = self.token_provider_api._persistence._list_tokens(
- self.trustee['id'], trust_id=trust['id'])
- token_count = len(tokens)
- self.assertEqual(expected_value, token_count)
-
- def test_delete_tokens_for_user_invalidates_tokens_from_trust(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- self.assert_token_count_for_trust(new_trust, 0)
- self.fetch_v2_token_from_trust(new_trust)
- self.assert_token_count_for_trust(new_trust, 1)
- self.token_provider_api._persistence.delete_tokens_for_user(
- self.trustee['id'])
- self.assert_token_count_for_trust(new_trust, 0)
-
- def test_token_from_trust_cant_get_another_token(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- auth_response = self.fetch_v2_token_from_trust(new_trust)
- trust_token_id = auth_response['access']['token']['id']
- request_body = _build_user_auth(token={'id': trust_token_id},
- tenant_id=self.tenant_bar['id'])
- self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate, {}, request_body)
-
- def test_delete_trust_revokes_token(self):
- unscoped_token = self.get_unscoped_token(self.trustor['name'])
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- context = self._create_auth_context(
- unscoped_token['access']['token']['id'])
- self.fetch_v2_token_from_trust(new_trust)
- trust_id = new_trust['id']
- tokens = self.token_provider_api._persistence._list_tokens(
- self.trustor['id'],
- trust_id=trust_id)
- self.assertEqual(1, len(tokens))
- self.trust_controller.delete_trust(context, trust_id=trust_id)
- tokens = self.token_provider_api._persistence._list_tokens(
- self.trustor['id'],
- trust_id=trust_id)
- self.assertEqual(0, len(tokens))
-
- def test_token_from_trust_with_no_role_fails(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- for assigned_role in self.assigned_roles:
- self.assignment_api.remove_role_from_user_and_project(
- self.trustor['id'], self.tenant_bar['id'], assigned_role)
- request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
- self.assertRaises(
- exception.Forbidden,
- self.controller.authenticate, {}, request_body)
-
- def test_expired_trust_get_token_fails(self):
- expires_at = (timeutils.utcnow() +
- datetime.timedelta(minutes=5)).strftime(TIME_FORMAT)
- time_expired = timeutils.utcnow() + datetime.timedelta(minutes=10)
- new_trust = self.create_trust(self.sample_data, self.trustor['name'],
- expires_at)
- with mock.patch.object(timeutils, 'utcnow') as mock_now:
- mock_now.return_value = time_expired
- request_body = self.build_v2_token_request('TWO', 'two2',
- new_trust)
- self.assertRaises(
- exception.Forbidden,
- self.controller.authenticate, {}, request_body)
-
- def test_token_from_trust_with_wrong_role_fails(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- self.assignment_api.add_role_to_user_and_project(
- self.trustor['id'],
- self.tenant_bar['id'],
- self.role_other['id'])
- for assigned_role in self.assigned_roles:
- self.assignment_api.remove_role_from_user_and_project(
- self.trustor['id'], self.tenant_bar['id'], assigned_role)
-
- request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
-
- self.assertRaises(
- exception.Forbidden,
- self.controller.authenticate, {}, request_body)
-
- def test_do_not_consume_remaining_uses_when_get_token_fails(self):
- trust_data = copy.deepcopy(self.sample_data)
- trust_data['remaining_uses'] = 3
- new_trust = self.create_trust(trust_data, self.trustor['name'])
-
- for assigned_role in self.assigned_roles:
- self.assignment_api.remove_role_from_user_and_project(
- self.trustor['id'], self.tenant_bar['id'], assigned_role)
-
- request_body = self.build_v2_token_request('TWO', 'two2', new_trust)
- self.assertRaises(exception.Forbidden,
- self.controller.authenticate, {}, request_body)
-
- unscoped_token = self.get_unscoped_token(self.trustor['name'])
- context = self._create_auth_context(
- unscoped_token['access']['token']['id'])
- trust = self.trust_controller.get_trust(context,
- new_trust['id'])['trust']
- self.assertEqual(3, trust['remaining_uses'])
-
- def disable_user(self, user):
- user['enabled'] = False
- self.identity_api.update_user(user['id'], user)
-
- def test_trust_get_token_fails_if_trustor_disabled(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- request_body = self.build_v2_token_request(self.trustee['name'],
- self.trustee['password'],
- new_trust)
- self.disable_user(self.trustor)
- self.assertRaises(
- exception.Forbidden,
- self.controller.authenticate, {}, request_body)
-
- def test_trust_get_token_fails_if_trustee_disabled(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- request_body = self.build_v2_token_request(self.trustee['name'],
- self.trustee['password'],
- new_trust)
- self.disable_user(self.trustee)
- self.assertRaises(
- exception.Unauthorized,
- self.controller.authenticate, {}, request_body)
-
-
-class TokenExpirationTest(AuthTest):
-
- @mock.patch.object(timeutils, 'utcnow')
- def _maintain_token_expiration(self, mock_utcnow):
- """Token expiration should be maintained after re-auth & validation."""
- now = datetime.datetime.utcnow()
- mock_utcnow.return_value = now
-
- r = self.controller.authenticate(
- {},
- auth={
- 'passwordCredentials': {
- 'username': self.user_foo['name'],
- 'password': self.user_foo['password']
- }
- })
- unscoped_token_id = r['access']['token']['id']
- original_expiration = r['access']['token']['expires']
-
- mock_utcnow.return_value = now + datetime.timedelta(seconds=1)
-
- r = self.controller.validate_token(
- dict(is_admin=True, query_string={}),
- token_id=unscoped_token_id)
- self.assertEqual(original_expiration, r['access']['token']['expires'])
-
- mock_utcnow.return_value = now + datetime.timedelta(seconds=2)
-
- r = self.controller.authenticate(
- {},
- auth={
- 'token': {
- 'id': unscoped_token_id,
- },
- 'tenantId': self.tenant_bar['id'],
- })
- scoped_token_id = r['access']['token']['id']
- self.assertEqual(original_expiration, r['access']['token']['expires'])
-
- mock_utcnow.return_value = now + datetime.timedelta(seconds=3)
-
- r = self.controller.validate_token(
- dict(is_admin=True, query_string={}),
- token_id=scoped_token_id)
- self.assertEqual(original_expiration, r['access']['token']['expires'])
-
- def test_maintain_uuid_token_expiration(self):
- self.config_fixture.config(group='token', provider='uuid')
- self._maintain_token_expiration()
-
-
-class AuthCatalog(unit.SQLDriverOverrides, AuthTest):
- """Tests for the catalog provided in the auth response."""
-
- def config_files(self):
- config_files = super(AuthCatalog, self).config_files()
- # We need to use a backend that supports disabled endpoints, like the
- # SQL backend.
- config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
- def _create_endpoints(self):
- def create_region(**kwargs):
- ref = unit.new_region_ref(**kwargs)
- self.catalog_api.create_region(ref)
- return ref
-
- def create_endpoint(service_id, region, **kwargs):
- endpoint = unit.new_endpoint_ref(region_id=region,
- service_id=service_id, **kwargs)
-
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
- return endpoint
-
- # Create a service for use with the endpoints.
- def create_service(**kwargs):
- ref = unit.new_service_ref(**kwargs)
- self.catalog_api.create_service(ref['id'], ref)
- return ref
-
- enabled_service_ref = create_service(enabled=True)
- disabled_service_ref = create_service(enabled=False)
-
- region = create_region()
-
- # Create endpoints
- enabled_endpoint_ref = create_endpoint(
- enabled_service_ref['id'], region['id'])
- create_endpoint(
- enabled_service_ref['id'], region['id'], enabled=False,
- interface='internal')
- create_endpoint(
- disabled_service_ref['id'], region['id'])
-
- return enabled_endpoint_ref
-
- def test_auth_catalog_disabled_endpoint(self):
- """On authenticate, get a catalog that excludes disabled endpoints."""
- endpoint_ref = self._create_endpoints()
-
- # Authenticate
- body_dict = _build_user_auth(
- username='FOO',
- password='foo2',
- tenant_name="BAR")
-
- token = self.controller.authenticate({}, body_dict)
-
- # Check the catalog
- self.assertEqual(1, len(token['access']['serviceCatalog']))
- endpoint = token['access']['serviceCatalog'][0]['endpoints'][0]
- self.assertEqual(
- 1, len(token['access']['serviceCatalog'][0]['endpoints']))
-
- exp_endpoint = {
- 'id': endpoint_ref['id'],
- 'publicURL': endpoint_ref['url'],
- 'region': endpoint_ref['region_id'],
- }
-
- self.assertEqual(exp_endpoint, endpoint)
-
- def test_validate_catalog_disabled_endpoint(self):
- """On validate, get back a catalog that excludes disabled endpoints."""
- endpoint_ref = self._create_endpoints()
-
- # Authenticate
- body_dict = _build_user_auth(
- username='FOO',
- password='foo2',
- tenant_name="BAR")
-
- token = self.controller.authenticate({}, body_dict)
-
- # Validate
- token_id = token['access']['token']['id']
- validate_ref = self.controller.validate_token(
- dict(is_admin=True, query_string={}),
- token_id=token_id)
-
- # Check the catalog
- self.assertEqual(1, len(token['access']['serviceCatalog']))
- endpoint = validate_ref['access']['serviceCatalog'][0]['endpoints'][0]
- self.assertEqual(
- 1, len(token['access']['serviceCatalog'][0]['endpoints']))
-
- exp_endpoint = {
- 'id': endpoint_ref['id'],
- 'publicURL': endpoint_ref['url'],
- 'region': endpoint_ref['region_id'],
- }
-
- self.assertEqual(exp_endpoint, endpoint)
-
-
-class NonDefaultAuthTest(unit.TestCase):
-
- def test_add_non_default_auth_method(self):
- self.config_fixture.config(group='auth',
- methods=['password', 'token', 'custom'])
- config.setup_authentication()
- self.assertTrue(hasattr(CONF.auth, 'custom'))
diff --git a/keystone-moon/keystone/tests/unit/test_auth_plugin.py b/keystone-moon/keystone/tests/unit/test_auth_plugin.py
deleted file mode 100644
index f0862ed6..00000000
--- a/keystone-moon/keystone/tests/unit/test_auth_plugin.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-import mock
-
-from keystone import auth
-from keystone import exception
-from keystone.tests import unit
-
-
-# for testing purposes only
-METHOD_NAME = 'simple_challenge_response'
-EXPECTED_RESPONSE = uuid.uuid4().hex
-DEMO_USER_ID = uuid.uuid4().hex
-
-
-class SimpleChallengeResponse(auth.AuthMethodHandler):
- def authenticate(self, context, auth_payload, user_context):
- if 'response' in auth_payload:
- if auth_payload['response'] != EXPECTED_RESPONSE:
- raise exception.Unauthorized('Wrong answer')
- user_context['user_id'] = DEMO_USER_ID
- else:
- return {"challenge": "What's the name of your high school?"}
-
-
-class TestAuthPlugin(unit.SQLDriverOverrides, unit.TestCase):
- def setUp(self):
- super(TestAuthPlugin, self).setUp()
- self.load_backends()
-
- self.api = auth.controllers.Auth()
-
- def config_overrides(self):
- super(TestAuthPlugin, self).config_overrides()
- method_opts = {
- METHOD_NAME:
- 'keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse',
- }
-
- self.auth_plugin_config_override(
- methods=['external', 'password', 'token', METHOD_NAME],
- **method_opts)
-
- def test_unsupported_auth_method(self):
- method_name = uuid.uuid4().hex
- auth_data = {'methods': [method_name]}
- auth_data[method_name] = {'test': 'test'}
- auth_data = {'identity': auth_data}
- self.assertRaises(exception.AuthMethodNotSupported,
- auth.controllers.AuthInfo.create,
- None,
- auth_data)
-
- def test_addition_auth_steps(self):
- auth_data = {'methods': [METHOD_NAME]}
- auth_data[METHOD_NAME] = {
- 'test': 'test'}
- auth_data = {'identity': auth_data}
- auth_info = auth.controllers.AuthInfo.create(None, auth_data)
- auth_context = {'extras': {}, 'method_names': []}
- try:
- self.api.authenticate({'environment': {}}, auth_info, auth_context)
- except exception.AdditionalAuthRequired as e:
- self.assertIn('methods', e.authentication)
- self.assertIn(METHOD_NAME, e.authentication['methods'])
- self.assertIn(METHOD_NAME, e.authentication)
- self.assertIn('challenge', e.authentication[METHOD_NAME])
-
- # test correct response
- auth_data = {'methods': [METHOD_NAME]}
- auth_data[METHOD_NAME] = {
- 'response': EXPECTED_RESPONSE}
- auth_data = {'identity': auth_data}
- auth_info = auth.controllers.AuthInfo.create(None, auth_data)
- auth_context = {'extras': {}, 'method_names': []}
- self.api.authenticate({'environment': {}}, auth_info, auth_context)
- self.assertEqual(DEMO_USER_ID, auth_context['user_id'])
-
- # test incorrect response
- auth_data = {'methods': [METHOD_NAME]}
- auth_data[METHOD_NAME] = {
- 'response': uuid.uuid4().hex}
- auth_data = {'identity': auth_data}
- auth_info = auth.controllers.AuthInfo.create(None, auth_data)
- auth_context = {'extras': {}, 'method_names': []}
- self.assertRaises(exception.Unauthorized,
- self.api.authenticate,
- {'environment': {}},
- auth_info,
- auth_context)
-
- def test_duplicate_method(self):
- # Having the same method twice doesn't cause load_auth_methods to fail.
- self.auth_plugin_config_override(
- methods=['external', 'external'])
- self.clear_auth_plugin_registry()
- auth.controllers.load_auth_methods()
- self.assertIn('external', auth.controllers.AUTH_METHODS)
-
-
-class TestAuthPluginDynamicOptions(TestAuthPlugin):
- def config_overrides(self):
- super(TestAuthPluginDynamicOptions, self).config_overrides()
- # Clear the override for the [auth] ``methods`` option so it is
- # possible to load the options from the config file.
- self.config_fixture.conf.clear_override('methods', group='auth')
-
- def config_files(self):
- config_files = super(TestAuthPluginDynamicOptions, self).config_files()
- config_files.append(unit.dirs.tests_conf('test_auth_plugin.conf'))
- return config_files
-
-
-class TestMapped(unit.TestCase):
- def setUp(self):
- super(TestMapped, self).setUp()
- self.load_backends()
-
- self.api = auth.controllers.Auth()
-
- def config_files(self):
- config_files = super(TestMapped, self).config_files()
- config_files.append(unit.dirs.tests_conf('test_auth_plugin.conf'))
- return config_files
-
- def auth_plugin_config_override(self, methods=None, **method_classes):
- # Do not apply the auth plugin overrides so that the config file is
- # tested
- pass
-
- def _test_mapped_invocation_with_method_name(self, method_name):
- with mock.patch.object(auth.plugins.mapped.Mapped,
- 'authenticate',
- return_value=None) as authenticate:
- context = {'environment': {}}
- auth_data = {
- 'identity': {
- 'methods': [method_name],
- method_name: {'protocol': method_name},
- }
- }
- auth_info = auth.controllers.AuthInfo.create(context, auth_data)
- auth_context = {'extras': {},
- 'method_names': [],
- 'user_id': uuid.uuid4().hex}
- self.api.authenticate(context, auth_info, auth_context)
- # make sure Mapped plugin got invoked with the correct payload
- ((context, auth_payload, auth_context),
- kwargs) = authenticate.call_args
- self.assertEqual(method_name, auth_payload['protocol'])
-
- def test_mapped_with_remote_user(self):
- with mock.patch.object(auth.plugins.mapped.Mapped,
- 'authenticate',
- return_value=None) as authenticate:
- # external plugin should fail and pass to mapped plugin
- method_name = 'saml2'
- auth_data = {'methods': [method_name]}
- # put the method name in the payload so its easier to correlate
- # method name with payload
- auth_data[method_name] = {'protocol': method_name}
- auth_data = {'identity': auth_data}
- auth_info = auth.controllers.AuthInfo.create(None, auth_data)
- auth_context = {'extras': {},
- 'method_names': [],
- 'user_id': uuid.uuid4().hex}
- environment = {'environment': {'REMOTE_USER': 'foo@idp.com'}}
- self.api.authenticate(environment, auth_info, auth_context)
- # make sure Mapped plugin got invoked with the correct payload
- ((context, auth_payload, auth_context),
- kwargs) = authenticate.call_args
- self.assertEqual(method_name, auth_payload['protocol'])
-
- def test_supporting_multiple_methods(self):
- for method_name in ['saml2', 'openid', 'x509']:
- self._test_mapped_invocation_with_method_name(method_name)
diff --git a/keystone-moon/keystone/tests/unit/test_backend.py b/keystone-moon/keystone/tests/unit/test_backend.py
deleted file mode 100644
index 302fc2c2..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend.py
+++ /dev/null
@@ -1,6851 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import datetime
-import hashlib
-import uuid
-
-from keystoneclient.common import cms
-import mock
-from oslo_config import cfg
-from oslo_utils import timeutils
-import six
-from six.moves import range
-from testtools import matchers
-
-from keystone.catalog import core
-from keystone.common import driver_hints
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit import filtering
-from keystone.tests.unit import utils as test_utils
-from keystone.token import provider
-
-
-CONF = cfg.CONF
-DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
-NULL_OBJECT = object()
-
-
-class AssignmentTestHelperMixin(object):
- """Mixin class to aid testing of assignments.
-
- This class supports data driven test plans that enable:
-
- - Creation of initial entities, such as domains, users, groups, projects
- and roles
- - Creation of assignments referencing the above entities
- - A set of input parameters and expected outputs to list_role_assignments
- based on the above test data
-
- A test plan is a dict of the form:
-
- test_plan = {
- entities: details and number of entities,
- group_memberships: group-user entity memberships,
- assignments: list of assignments to create,
- tests: list of pairs of input params and expected outputs}
-
- An example test plan:
-
- test_plan = {
- # First, create the entities required. Entities are specified by
- # a dict with the key being the entity type and the value an
- # entity specification which can be one of:
- #
- # - a simple number, e.g. {'users': 3} creates 3 users
- # - a dict where more information regarding the contents of the entity
- # is required, e.g. {'domains' : {'users : 3}} creates a domain
- # with three users
- # - a list of entity specifications if multiple are required
- #
- # The following creates a domain that contains a single user, group and
- # project, as well as creating three roles.
-
- 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1},
- 'roles': 3},
-
- # If it is required that an existing domain be used for the new
- # entities, then the id of that domain can be included in the
- # domain dict. For example, if alternatively we wanted to add 3 users
- # to the default domain, add a second domain containing 3 projects as
- # well as 5 additional empty domains, the entities would be defined as:
- #
- # 'entities': {'domains': [{'id': DEFAULT_DOMAIN, 'users': 3},
- # {'projects': 3}, 5]},
- #
- # A project hierarchy can be specified within the 'projects' section by
- # nesting the 'project' key, for example to create a project with three
- # sub-projects you would use:
-
- 'projects': {'project': 3}
-
- # A more complex hierarchy can also be defined, for example the
- # following would define three projects each containing a
- # sub-project, each of which contain a further three sub-projects.
-
- 'projects': [{'project': {'project': 3}},
- {'project': {'project': 3}},
- {'project': {'project': 3}}]
-
- # A list of groups and their members. In this case make users with
- # index 0 and 1 members of group with index 0. Users and Groups are
- # indexed in the order they appear in the 'entities' key above.
-
- 'group_memberships': [{'group': 0, 'users': [0, 1]}]
-
- # Next, create assignments between the entities, referencing the
- # entities by index, i.e. 'user': 0 refers to user[0]. Entities are
- # indexed in the order they appear in the 'entities' key above within
- # their entity type.
-
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'project': 0},
- {'group': 0, 'role': 2, 'domain': 0},
- {'user': 0, 'role': 2, 'project': 0}],
-
- # Finally, define an array of tests where list_role_assignment() is
- # called with the given input parameters and the results are then
- # confirmed to be as given in 'results'. Again, all entities are
- # referenced by index.
-
- 'tests': [
- {'params': {},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'project': 0},
- {'group': 0, 'role': 2, 'domain': 0},
- {'user': 0, 'role': 2, 'project': 0}]},
- {'params': {'role': 2},
- 'results': [{'group': 0, 'role': 2, 'domain': 0},
- {'user': 0, 'role': 2, 'project': 0}]}]
-
- # The 'params' key also supports the 'effective' and
- # 'inherited_to_projects' options to list_role_assignments.}
-
- """
- def _handle_project_spec(self, test_data, domain_id, project_spec,
- parent_id=None):
- """Handle the creation of a project or hierarchy of projects.
-
- project_spec may either be a count of the number of projects to
- create, or it may be a list of the form:
-
- [{'project': project_spec}, {'project': project_spec}, ...]
-
- This method is called recursively to handle the creation of a
- hierarchy of projects.
-
- """
- def _create_project(domain_id, parent_id):
- new_project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain_id, 'parent_id': parent_id}
- new_project = self.resource_api.create_project(new_project['id'],
- new_project)
- return new_project
-
- if isinstance(project_spec, list):
- for this_spec in project_spec:
- self._handle_project_spec(
- test_data, domain_id, this_spec, parent_id=parent_id)
- elif isinstance(project_spec, dict):
- new_proj = _create_project(domain_id, parent_id)
- test_data['projects'].append(new_proj)
- self._handle_project_spec(
- test_data, domain_id, project_spec['project'],
- parent_id=new_proj['id'])
- else:
- for _ in range(project_spec):
- test_data['projects'].append(
- _create_project(domain_id, parent_id))
-
- def _handle_domain_spec(self, test_data, domain_spec):
- """Handle the creation of domains and their contents.
-
- domain_spec may either be a count of the number of empty domains to
- create, a dict describing the domain contents, or a list of
- domain_specs.
-
- In the case when a list is provided, this method calls itself
- recursively to handle the list elements.
-
- This method will insert any entities created into test_data
-
- """
- def _create_domain(domain_id=None):
- if domain_id is None:
- new_domain = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(new_domain['id'],
- new_domain)
- return new_domain
- else:
- # The test plan specified an existing domain to use
- return self.resource_api.get_domain(domain_id)
-
- def _create_entity_in_domain(entity_type, domain_id):
- """Create a user or group entity in the domain."""
-
- new_entity = {'name': uuid.uuid4().hex, 'domain_id': domain_id}
- if entity_type == 'users':
- new_entity = self.identity_api.create_user(new_entity)
- elif entity_type == 'groups':
- new_entity = self.identity_api.create_group(new_entity)
- else:
- # Must be a bad test plan
- raise exception.NotImplemented()
- return new_entity
-
- if isinstance(domain_spec, list):
- for x in domain_spec:
- self._handle_domain_spec(test_data, x)
- elif isinstance(domain_spec, dict):
- # If there is a domain ID specified, then use it
- the_domain = _create_domain(domain_spec.get('id'))
- test_data['domains'].append(the_domain)
- for entity_type, value in domain_spec.items():
- if entity_type == 'id':
- # We already used this above to determine whether to
- # use and existing domain
- continue
- if entity_type == 'projects':
- # If it's projects, we need to handle the potential
- # specification of a project hierarchy
- self._handle_project_spec(
- test_data, the_domain['id'], value)
- else:
- # It's a count of number of entities
- for _ in range(value):
- test_data[entity_type].append(
- _create_entity_in_domain(
- entity_type, the_domain['id']))
- else:
- for _ in range(domain_spec):
- test_data['domains'].append(_create_domain())
-
- def create_entities(self, entity_pattern):
- """Create the entities specified in the test plan.
-
- Process the 'entities' key in the test plan, creating the requested
- entities. Each created entity will be added to the array of entities
- stored in the returned test_data object, e.g.:
-
- test_data['users'] = [user[0], user[1]....]
-
- """
- def _create_role():
- new_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- return self.role_api.create_role(new_role['id'], new_role)
-
- test_data = {}
- for entity in ['users', 'groups', 'domains', 'projects', 'roles']:
- test_data[entity] = []
-
- # Create any domains requested and, if specified, any entities within
- # those domains
- if 'domains' in entity_pattern:
- self._handle_domain_spec(test_data, entity_pattern['domains'])
-
- # Create any roles requested
- if 'roles' in entity_pattern:
- for _ in range(entity_pattern['roles']):
- test_data['roles'].append(_create_role())
-
- return test_data
-
- def _convert_entity_shorthand(self, key, shorthand_data, reference_data):
- """Convert a shorthand entity description into a full ID reference.
-
- In test plan definitions, we allow a shorthand for referencing to an
- entity of the form:
-
- 'user': 0
-
- which is actually shorthand for:
-
- 'user_id': reference_data['users'][0]['id']
-
- This method converts the shorthand version into the full reference.
-
- """
- expanded_key = '%s_id' % key
- reference_index = '%ss' % key
- index_value = (
- reference_data[reference_index][shorthand_data[key]]['id'])
- return expanded_key, index_value
-
- def create_group_memberships(self, group_pattern, test_data):
- """Create the group memberships specified in the test plan."""
-
- for group_spec in group_pattern:
- # Each membership specification is a dict of the form:
- #
- # {'group': 0, 'users': [list of user indexes]}
- #
- # Add all users in the list to the specified group, first
- # converting from index to full entity ID.
- group_value = test_data['groups'][group_spec['group']]['id']
- for user_index in group_spec['users']:
- user_value = test_data['users'][user_index]['id']
- self.identity_api.add_user_to_group(user_value, group_value)
- return test_data
-
- def create_assignments(self, assignment_pattern, test_data):
- """Create the assignments specified in the test plan."""
-
- # First store how many assignments are already in the system,
- # so during the tests we can check the number of new assignments
- # created.
- test_data['initial_assignment_count'] = (
- len(self.assignment_api.list_role_assignments()))
-
- # Now create the new assignments in the test plan
- for assignment in assignment_pattern:
- # Each assignment is a dict of the form:
- #
- # { 'user': 0, 'project':1, 'role': 6}
- #
- # where the value of each item is the index into the array of
- # entities created earlier.
- #
- # We process the assignment dict to create the args required to
- # make the create_grant() call.
- args = {}
- for param in assignment:
- if param == 'inherited_to_projects':
- args[param] = assignment[param]
- else:
- # Turn 'entity : 0' into 'entity_id = ac6736ba873d'
- # where entity in user, group, project or domain
- key, value = self._convert_entity_shorthand(
- param, assignment, test_data)
- args[key] = value
- self.assignment_api.create_grant(**args)
- return test_data
-
- def execute_assignment_tests(self, test_plan, test_data):
- """Execute the test plan, based on the created test_data."""
-
- def check_results(expected, actual, param_arg_count):
- if param_arg_count == 0:
- # It was an unfiltered call, so default fixture assignments
- # might be polluting our answer - so we take into account
- # how many assignments there were before the test.
- self.assertEqual(
- len(expected) + test_data['initial_assignment_count'],
- len(actual))
- else:
- self.assertThat(actual, matchers.HasLength(len(expected)))
-
- for each_expected in expected:
- expected_assignment = {}
- for param in each_expected:
- if param == 'inherited_to_projects':
- expected_assignment[param] = each_expected[param]
- elif param == 'indirect':
- # We're expecting the result to contain an indirect
- # dict with the details how the role came to be placed
- # on this entity - so convert the key/value pairs of
- # that dict into real entity references.
- indirect_term = {}
- for indirect_param in each_expected[param]:
- key, value = self._convert_entity_shorthand(
- indirect_param, each_expected[param],
- test_data)
- indirect_term[key] = value
- expected_assignment[param] = indirect_term
- else:
- # Convert a simple shorthand entry into a full
- # entity reference
- key, value = self._convert_entity_shorthand(
- param, each_expected, test_data)
- expected_assignment[key] = value
- self.assertIn(expected_assignment, actual)
-
- # Go through each test in the array, processing the input params, which
- # we build into an args dict, and then call list_role_assignments. Then
- # check the results against those specified in the test plan.
- for test in test_plan.get('tests', []):
- args = {}
- for param in test['params']:
- if param in ['effective', 'inherited']:
- # Just pass the value into the args
- args[param] = test['params'][param]
- else:
- # Turn 'entity : 0' into 'entity_id = ac6736ba873d'
- # where entity in user, group, project or domain
- key, value = self._convert_entity_shorthand(
- param, test['params'], test_data)
- args[key] = value
- results = self.assignment_api.list_role_assignments(**args)
- check_results(test['results'], results, len(args))
-
- def execute_assignment_test_plan(self, test_plan):
- """Create entities, assignments and execute the test plan.
-
- The standard method to call to create entities and assignments and
- execute the tests as specified in the test_plan. The test_data
- dict is returned so that, if required, the caller can execute
- additional manual tests with the entities and assignments created.
-
- """
- test_data = self.create_entities(test_plan['entities'])
- if 'group_memberships' in test_plan:
- self.create_group_memberships(test_plan['group_memberships'],
- test_data)
- if 'assignments' in test_plan:
- test_data = self.create_assignments(test_plan['assignments'],
- test_data)
- self.execute_assignment_tests(test_plan, test_data)
- return test_data
-
-
-class IdentityTests(AssignmentTestHelperMixin):
- def _get_domain_fixture(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain['id'], domain)
- return domain
-
- def _set_domain_scope(self, domain_id):
- # We only provide a domain scope if we have multiple drivers
- if CONF.identity.domain_specific_drivers_enabled:
- return domain_id
-
- def test_project_add_and_remove_user_role(self):
- user_ids = self.assignment_api.list_user_ids_for_project(
- self.tenant_bar['id'])
- self.assertNotIn(self.user_two['id'], user_ids)
-
- self.assignment_api.add_role_to_user_and_project(
- tenant_id=self.tenant_bar['id'],
- user_id=self.user_two['id'],
- role_id=self.role_other['id'])
- user_ids = self.assignment_api.list_user_ids_for_project(
- self.tenant_bar['id'])
- self.assertIn(self.user_two['id'], user_ids)
-
- self.assignment_api.remove_role_from_user_and_project(
- tenant_id=self.tenant_bar['id'],
- user_id=self.user_two['id'],
- role_id=self.role_other['id'])
-
- user_ids = self.assignment_api.list_user_ids_for_project(
- self.tenant_bar['id'])
- self.assertNotIn(self.user_two['id'], user_ids)
-
- def test_remove_user_role_not_assigned(self):
- # Expect failure if attempt to remove a role that was never assigned to
- # the user.
- self.assertRaises(exception.RoleNotFound,
- self.assignment_api.
- remove_role_from_user_and_project,
- tenant_id=self.tenant_bar['id'],
- user_id=self.user_two['id'],
- role_id=self.role_other['id'])
-
- def test_authenticate_bad_user(self):
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=uuid.uuid4().hex,
- password=self.user_foo['password'])
-
- def test_authenticate_bad_password(self):
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=self.user_foo['id'],
- password=uuid.uuid4().hex)
-
- def test_authenticate(self):
- user_ref = self.identity_api.authenticate(
- context={},
- user_id=self.user_sna['id'],
- password=self.user_sna['password'])
- # NOTE(termie): the password field is left in user_sna to make
- # it easier to authenticate in tests, but should
- # not be returned by the api
- self.user_sna.pop('password')
- self.user_sna['enabled'] = True
- self.assertDictEqual(user_ref, self.user_sna)
-
- def test_authenticate_and_get_roles_no_metadata(self):
- user = {
- 'name': 'NO_META',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'no_meta2',
- }
- new_user = self.identity_api.create_user(user)
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- new_user['id'])
- user_ref = self.identity_api.authenticate(
- context={},
- user_id=new_user['id'],
- password=user['password'])
- self.assertNotIn('password', user_ref)
- # NOTE(termie): the password field is left in user_sna to make
- # it easier to authenticate in tests, but should
- # not be returned by the api
- user.pop('password')
- self.assertDictContainsSubset(user, user_ref)
- role_list = self.assignment_api.get_roles_for_user_and_project(
- new_user['id'], self.tenant_baz['id'])
- self.assertEqual(1, len(role_list))
- self.assertIn(CONF.member_role_id, role_list)
-
- def test_authenticate_if_no_password_set(self):
- id_ = uuid.uuid4().hex
- user = {
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- }
- self.identity_api.create_user(user)
-
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=id_,
- password='password')
-
- def test_create_unicode_user_name(self):
- unicode_name = u'name \u540d\u5b57'
- user = {'name': unicode_name,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex}
- ref = self.identity_api.create_user(user)
- self.assertEqual(unicode_name, ref['name'])
-
- def test_get_project(self):
- tenant_ref = self.resource_api.get_project(self.tenant_bar['id'])
- self.assertDictEqual(tenant_ref, self.tenant_bar)
-
- def test_get_project_404(self):
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- uuid.uuid4().hex)
-
- def test_get_project_by_name(self):
- tenant_ref = self.resource_api.get_project_by_name(
- self.tenant_bar['name'],
- DEFAULT_DOMAIN_ID)
- self.assertDictEqual(tenant_ref, self.tenant_bar)
-
- def test_get_project_by_name_404(self):
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project_by_name,
- uuid.uuid4().hex,
- DEFAULT_DOMAIN_ID)
-
- def test_list_user_ids_for_project(self):
- user_ids = self.assignment_api.list_user_ids_for_project(
- self.tenant_baz['id'])
- self.assertEqual(2, len(user_ids))
- self.assertIn(self.user_two['id'], user_ids)
- self.assertIn(self.user_badguy['id'], user_ids)
-
- def test_list_user_ids_for_project_no_duplicates(self):
- # Create user
- user_ref = {
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex,
- 'enabled': True}
- user_ref = self.identity_api.create_user(user_ref)
- # Create project
- project_ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project(
- project_ref['id'], project_ref)
- # Create 2 roles and give user each role in project
- for i in range(2):
- role_ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.role_api.create_role(role_ref['id'], role_ref)
- self.assignment_api.add_role_to_user_and_project(
- user_id=user_ref['id'],
- tenant_id=project_ref['id'],
- role_id=role_ref['id'])
- # Get the list of user_ids in project
- user_ids = self.assignment_api.list_user_ids_for_project(
- project_ref['id'])
- # Ensure the user is only returned once
- self.assertEqual(1, len(user_ids))
-
- def test_get_project_user_ids_404(self):
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.list_user_ids_for_project,
- uuid.uuid4().hex)
-
- def test_get_user(self):
- user_ref = self.identity_api.get_user(self.user_foo['id'])
- # NOTE(termie): the password field is left in user_foo to make
- # it easier to authenticate in tests, but should
- # not be returned by the api
- self.user_foo.pop('password')
- self.assertDictEqual(user_ref, self.user_foo)
-
- @unit.skip_if_cache_disabled('identity')
- def test_cache_layer_get_user(self):
- user = {
- 'name': uuid.uuid4().hex.lower(),
- 'domain_id': DEFAULT_DOMAIN_ID
- }
- self.identity_api.create_user(user)
- ref = self.identity_api.get_user_by_name(user['name'],
- user['domain_id'])
- # cache the result.
- self.identity_api.get_user(ref['id'])
- # delete bypassing identity api
- domain_id, driver, entity_id = (
- self.identity_api._get_domain_driver_and_entity_id(ref['id']))
- driver.delete_user(entity_id)
-
- self.assertDictEqual(ref, self.identity_api.get_user(ref['id']))
- self.identity_api.get_user.invalidate(self.identity_api, ref['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user, ref['id'])
- user = {
- 'name': uuid.uuid4().hex.lower(),
- 'domain_id': DEFAULT_DOMAIN_ID
- }
- self.identity_api.create_user(user)
- ref = self.identity_api.get_user_by_name(user['name'],
- user['domain_id'])
- user['description'] = uuid.uuid4().hex
- # cache the result.
- self.identity_api.get_user(ref['id'])
- # update using identity api and get back updated user.
- user_updated = self.identity_api.update_user(ref['id'], user)
- self.assertDictContainsSubset(self.identity_api.get_user(ref['id']),
- user_updated)
- self.assertDictContainsSubset(
- self.identity_api.get_user_by_name(ref['name'], ref['domain_id']),
- user_updated)
-
- def test_get_user_404(self):
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- uuid.uuid4().hex)
-
- def test_get_user_by_name(self):
- user_ref = self.identity_api.get_user_by_name(
- self.user_foo['name'], DEFAULT_DOMAIN_ID)
- # NOTE(termie): the password field is left in user_foo to make
- # it easier to authenticate in tests, but should
- # not be returned by the api
- self.user_foo.pop('password')
- self.assertDictEqual(user_ref, self.user_foo)
-
- @unit.skip_if_cache_disabled('identity')
- def test_cache_layer_get_user_by_name(self):
- user = {
- 'name': uuid.uuid4().hex.lower(),
- 'domain_id': DEFAULT_DOMAIN_ID
- }
- self.identity_api.create_user(user)
- ref = self.identity_api.get_user_by_name(user['name'],
- user['domain_id'])
- # delete bypassing the identity api.
- domain_id, driver, entity_id = (
- self.identity_api._get_domain_driver_and_entity_id(ref['id']))
- driver.delete_user(entity_id)
-
- self.assertDictEqual(ref, self.identity_api.get_user_by_name(
- user['name'], DEFAULT_DOMAIN_ID))
- self.identity_api.get_user_by_name.invalidate(
- self.identity_api, user['name'], DEFAULT_DOMAIN_ID)
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user_by_name,
- user['name'], DEFAULT_DOMAIN_ID)
- user = {
- 'name': uuid.uuid4().hex.lower(),
- 'domain_id': DEFAULT_DOMAIN_ID
- }
- self.identity_api.create_user(user)
- ref = self.identity_api.get_user_by_name(user['name'],
- user['domain_id'])
- user['description'] = uuid.uuid4().hex
- user_updated = self.identity_api.update_user(ref['id'], user)
- self.assertDictContainsSubset(self.identity_api.get_user(ref['id']),
- user_updated)
- self.assertDictContainsSubset(
- self.identity_api.get_user_by_name(ref['name'], ref['domain_id']),
- user_updated)
-
- def test_get_user_by_name_404(self):
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user_by_name,
- uuid.uuid4().hex,
- DEFAULT_DOMAIN_ID)
-
- def test_create_duplicate_user_name_fails(self):
- user = {'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'fakepass',
- 'tenants': ['bar']}
- user = self.identity_api.create_user(user)
- self.assertRaises(exception.Conflict,
- self.identity_api.create_user,
- user)
-
- def test_create_duplicate_user_name_in_different_domains(self):
- new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(new_domain['id'], new_domain)
- user1 = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex}
- user2 = {'name': user1['name'],
- 'domain_id': new_domain['id'],
- 'password': uuid.uuid4().hex}
- self.identity_api.create_user(user1)
- self.identity_api.create_user(user2)
-
- def test_move_user_between_domains(self):
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- user = {'name': uuid.uuid4().hex,
- 'domain_id': domain1['id'],
- 'password': uuid.uuid4().hex}
- user = self.identity_api.create_user(user)
- user['domain_id'] = domain2['id']
- self.identity_api.update_user(user['id'], user)
-
- def test_move_user_between_domains_with_clashing_names_fails(self):
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- # First, create a user in domain1
- user1 = {'name': uuid.uuid4().hex,
- 'domain_id': domain1['id'],
- 'password': uuid.uuid4().hex}
- user1 = self.identity_api.create_user(user1)
- # Now create a user in domain2 with a potentially clashing
- # name - which should work since we have domain separation
- user2 = {'name': user1['name'],
- 'domain_id': domain2['id'],
- 'password': uuid.uuid4().hex}
- user2 = self.identity_api.create_user(user2)
- # Now try and move user1 into the 2nd domain - which should
- # fail since the names clash
- user1['domain_id'] = domain2['id']
- self.assertRaises(exception.Conflict,
- self.identity_api.update_user,
- user1['id'],
- user1)
-
- def test_rename_duplicate_user_name_fails(self):
- user1 = {'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'fakepass',
- 'tenants': ['bar']}
- user2 = {'name': 'fake2',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'fakepass',
- 'tenants': ['bar']}
- self.identity_api.create_user(user1)
- user2 = self.identity_api.create_user(user2)
- user2['name'] = 'fake1'
- self.assertRaises(exception.Conflict,
- self.identity_api.update_user,
- user2['id'],
- user2)
-
- def test_update_user_id_fails(self):
- user = {'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'fakepass',
- 'tenants': ['bar']}
- user = self.identity_api.create_user(user)
- original_id = user['id']
- user['id'] = 'fake2'
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- original_id,
- user)
- user_ref = self.identity_api.get_user(original_id)
- self.assertEqual(original_id, user_ref['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- 'fake2')
-
- def test_create_duplicate_project_id_fails(self):
- tenant = {'id': 'fake1', 'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project('fake1', tenant)
- tenant['name'] = 'fake2'
- self.assertRaises(exception.Conflict,
- self.resource_api.create_project,
- 'fake1',
- tenant)
-
- def test_create_duplicate_project_name_fails(self):
- tenant = {'id': 'fake1', 'name': 'fake',
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project('fake1', tenant)
- tenant['id'] = 'fake2'
- self.assertRaises(exception.Conflict,
- self.resource_api.create_project,
- 'fake1',
- tenant)
-
- def test_create_duplicate_project_name_in_different_domains(self):
- new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(new_domain['id'], new_domain)
- tenant1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- tenant2 = {'id': uuid.uuid4().hex, 'name': tenant1['name'],
- 'domain_id': new_domain['id']}
- self.resource_api.create_project(tenant1['id'], tenant1)
- self.resource_api.create_project(tenant2['id'], tenant2)
-
- def test_move_project_between_domains(self):
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project['id'], project)
- project['domain_id'] = domain2['id']
- self.resource_api.update_project(project['id'], project)
-
- def test_move_project_between_domains_with_clashing_names_fails(self):
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- # First, create a project in domain1
- project1 = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project1['id'], project1)
- # Now create a project in domain2 with a potentially clashing
- # name - which should work since we have domain separation
- project2 = {'id': uuid.uuid4().hex,
- 'name': project1['name'],
- 'domain_id': domain2['id']}
- self.resource_api.create_project(project2['id'], project2)
- # Now try and move project1 into the 2nd domain - which should
- # fail since the names clash
- project1['domain_id'] = domain2['id']
- self.assertRaises(exception.Conflict,
- self.resource_api.update_project,
- project1['id'],
- project1)
-
- def test_rename_duplicate_project_name_fails(self):
- tenant1 = {'id': 'fake1', 'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- tenant2 = {'id': 'fake2', 'name': 'fake2',
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project('fake1', tenant1)
- self.resource_api.create_project('fake2', tenant2)
- tenant2['name'] = 'fake1'
- self.assertRaises(exception.Error,
- self.resource_api.update_project,
- 'fake2',
- tenant2)
-
- def test_update_project_id_does_nothing(self):
- tenant = {'id': 'fake1', 'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project('fake1', tenant)
- tenant['id'] = 'fake2'
- self.resource_api.update_project('fake1', tenant)
- tenant_ref = self.resource_api.get_project('fake1')
- self.assertEqual('fake1', tenant_ref['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- 'fake2')
-
- def test_list_role_assignments_unfiltered(self):
- """Test unfiltered listing of role assignments."""
-
- test_plan = {
- # Create a domain, with a user, group & project
- 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1},
- 'roles': 3},
- # Create a grant of each type (user/group on project/domain)
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'project': 0},
- {'group': 0, 'role': 2, 'domain': 0},
- {'group': 0, 'role': 2, 'project': 0}],
- 'tests': [
- # Check that we get back the 4 assignments
- {'params': {},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'project': 0},
- {'group': 0, 'role': 2, 'domain': 0},
- {'group': 0, 'role': 2, 'project': 0}]}
- ]
- }
- self.execute_assignment_test_plan(test_plan)
-
- def test_list_role_assignments_filtered_by_role(self):
- """Test listing of role assignments filtered by role ID."""
-
- test_plan = {
- # Create a user, group & project in the default domain
- 'entities': {'domains': {'id': DEFAULT_DOMAIN_ID,
- 'users': 1, 'groups': 1, 'projects': 1},
- 'roles': 3},
- # Create a grant of each type (user/group on project/domain)
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'project': 0},
- {'group': 0, 'role': 2, 'domain': 0},
- {'group': 0, 'role': 2, 'project': 0}],
- 'tests': [
- # Check that when filtering by role, we only get back those
- # that match
- {'params': {'role': 2},
- 'results': [{'group': 0, 'role': 2, 'domain': 0},
- {'group': 0, 'role': 2, 'project': 0}]}
- ]
- }
- test_data = self.execute_assignment_test_plan(test_plan)
-
- # Also test that list_role_assignments_for_role() gives the same answer
- assignment_list = self.assignment_api.list_role_assignments_for_role(
- role_id=test_data['roles'][2]['id'])
- self.assertThat(assignment_list, matchers.HasLength(2))
-
- # Now check that each of our two new entries are in the list
- self.assertIn(
- {'group_id': test_data['groups'][0]['id'],
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'role_id': test_data['roles'][2]['id']},
- assignment_list)
- self.assertIn(
- {'group_id': test_data['groups'][0]['id'],
- 'project_id': test_data['projects'][0]['id'],
- 'role_id': test_data['roles'][2]['id']},
- assignment_list)
-
- def test_list_group_role_assignment(self):
- # When a group role assignment is created and the role assignments are
- # listed then the group role assignment is included in the list.
-
- test_plan = {
- 'entities': {'domains': {'id': DEFAULT_DOMAIN_ID,
- 'groups': 1, 'projects': 1},
- 'roles': 1},
- 'assignments': [{'group': 0, 'role': 0, 'project': 0}],
- 'tests': [
- {'params': {},
- 'results': [{'group': 0, 'role': 0, 'project': 0}]}
- ]
- }
- self.execute_assignment_test_plan(test_plan)
-
- def test_list_role_assignments_bad_role(self):
- assignment_list = self.assignment_api.list_role_assignments_for_role(
- role_id=uuid.uuid4().hex)
- self.assertEqual([], assignment_list)
-
- def test_add_duplicate_role_grant(self):
- roles_ref = self.assignment_api.get_roles_for_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'])
- self.assertNotIn(self.role_admin['id'], roles_ref)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
- self.assertRaises(exception.Conflict,
- self.assignment_api.add_role_to_user_and_project,
- self.user_foo['id'],
- self.tenant_bar['id'],
- self.role_admin['id'])
-
- def test_get_role_by_user_and_project_with_user_in_group(self):
- """Test for get role by user and project, user was added into a group.
-
- Test Plan:
-
- - Create a user, a project & a group, add this user to group
- - Create roles and grant them to user and project
- - Check the role list get by the user and project was as expected
-
- """
- user_ref = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex,
- 'enabled': True}
- user_ref = self.identity_api.create_user(user_ref)
-
- project_ref = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project(project_ref['id'], project_ref)
-
- group = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- group_id = self.identity_api.create_group(group)['id']
- self.identity_api.add_user_to_group(user_ref['id'], group_id)
-
- role_ref_list = []
- for i in range(2):
- role_ref = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role_ref['id'], role_ref)
- role_ref_list.append(role_ref)
-
- self.assignment_api.add_role_to_user_and_project(
- user_id=user_ref['id'],
- tenant_id=project_ref['id'],
- role_id=role_ref['id'])
-
- role_list = self.assignment_api.get_roles_for_user_and_project(
- user_id=user_ref['id'],
- tenant_id=project_ref['id'])
-
- self.assertEqual(set(role_list),
- set([r['id'] for r in role_ref_list]))
-
- def test_get_role_by_user_and_project(self):
- roles_ref = self.assignment_api.get_roles_for_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'])
- self.assertNotIn(self.role_admin['id'], roles_ref)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
- roles_ref = self.assignment_api.get_roles_for_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'])
- self.assertIn(self.role_admin['id'], roles_ref)
- self.assertNotIn('member', roles_ref)
-
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'], 'member')
- roles_ref = self.assignment_api.get_roles_for_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'])
- self.assertIn(self.role_admin['id'], roles_ref)
- self.assertIn('member', roles_ref)
-
- def test_get_roles_for_user_and_domain(self):
- """Test for getting roles for user on a domain.
-
- Test Plan:
-
- - Create a domain, with 2 users
- - Check no roles yet exit
- - Give user1 two roles on the domain, user2 one role
- - Get roles on user1 and the domain - maybe sure we only
- get back the 2 roles on user1
- - Delete both roles from user1
- - Check we get no roles back for user1 on domain
-
- """
- new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(new_domain['id'], new_domain)
- new_user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': new_domain['id']}
- new_user1 = self.identity_api.create_user(new_user1)
- new_user2 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': new_domain['id']}
- new_user2 = self.identity_api.create_user(new_user2)
- roles_ref = self.assignment_api.list_grants(
- user_id=new_user1['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
- # Now create the grants (roles are defined in default_fixtures)
- self.assignment_api.create_grant(user_id=new_user1['id'],
- domain_id=new_domain['id'],
- role_id='member')
- self.assignment_api.create_grant(user_id=new_user1['id'],
- domain_id=new_domain['id'],
- role_id='other')
- self.assignment_api.create_grant(user_id=new_user2['id'],
- domain_id=new_domain['id'],
- role_id='admin')
- # Read back the roles for user1 on domain
- roles_ids = self.assignment_api.get_roles_for_user_and_domain(
- new_user1['id'], new_domain['id'])
- self.assertEqual(2, len(roles_ids))
- self.assertIn(self.role_member['id'], roles_ids)
- self.assertIn(self.role_other['id'], roles_ids)
-
- # Now delete both grants for user1
- self.assignment_api.delete_grant(user_id=new_user1['id'],
- domain_id=new_domain['id'],
- role_id='member')
- self.assignment_api.delete_grant(user_id=new_user1['id'],
- domain_id=new_domain['id'],
- role_id='other')
- roles_ref = self.assignment_api.list_grants(
- user_id=new_user1['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
-
- def test_get_roles_for_user_and_domain_404(self):
- """Test errors raised when getting roles for user on a domain.
-
- Test Plan:
-
- - Check non-existing user gives UserNotFound
- - Check non-existing domain gives DomainNotFound
-
- """
- new_domain = self._get_domain_fixture()
- new_user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': new_domain['id']}
- new_user1 = self.identity_api.create_user(new_user1)
-
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.get_roles_for_user_and_domain,
- uuid.uuid4().hex,
- new_domain['id'])
-
- self.assertRaises(exception.DomainNotFound,
- self.assignment_api.get_roles_for_user_and_domain,
- new_user1['id'],
- uuid.uuid4().hex)
-
- def test_get_roles_for_user_and_project_404(self):
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.get_roles_for_user_and_project,
- uuid.uuid4().hex,
- self.tenant_bar['id'])
-
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.get_roles_for_user_and_project,
- self.user_foo['id'],
- uuid.uuid4().hex)
-
- def test_add_role_to_user_and_project_404(self):
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.add_role_to_user_and_project,
- self.user_foo['id'],
- uuid.uuid4().hex,
- self.role_admin['id'])
-
- self.assertRaises(exception.RoleNotFound,
- self.assignment_api.add_role_to_user_and_project,
- self.user_foo['id'],
- self.tenant_bar['id'],
- uuid.uuid4().hex)
-
- def test_add_role_to_user_and_project_no_user(self):
- # If add_role_to_user_and_project and the user doesn't exist, then
- # no error.
- user_id_not_exist = uuid.uuid4().hex
- self.assignment_api.add_role_to_user_and_project(
- user_id_not_exist, self.tenant_bar['id'], self.role_admin['id'])
-
- def test_remove_role_from_user_and_project(self):
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'], 'member')
- self.assignment_api.remove_role_from_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'], 'member')
- roles_ref = self.assignment_api.get_roles_for_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'])
- self.assertNotIn('member', roles_ref)
- self.assertRaises(exception.NotFound,
- self.assignment_api.
- remove_role_from_user_and_project,
- self.user_foo['id'],
- self.tenant_bar['id'],
- 'member')
-
- def test_get_role_grant_by_user_and_project(self):
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'])
- self.assertEqual(1, len(roles_ref))
- self.assignment_api.create_grant(user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_admin['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'])
- self.assertIn(self.role_admin['id'],
- [role_ref['id'] for role_ref in roles_ref])
-
- self.assignment_api.create_grant(user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_bar['id'])
-
- roles_ref_ids = []
- for ref in roles_ref:
- roles_ref_ids.append(ref['id'])
- self.assertIn(self.role_admin['id'], roles_ref_ids)
- self.assertIn('member', roles_ref_ids)
-
- def test_remove_role_grant_from_user_and_project(self):
- self.assignment_api.create_grant(user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'])
- self.assertDictEqual(roles_ref[0], self.role_member)
-
- self.assignment_api.delete_grant(user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'],
- role_id='member')
-
- def test_get_role_assignment_by_project_not_found(self):
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.check_grant_role_id,
- user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'],
- role_id='member')
-
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.check_grant_role_id,
- group_id=uuid.uuid4().hex,
- project_id=self.tenant_baz['id'],
- role_id='member')
-
- def test_get_role_assignment_by_domain_not_found(self):
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.check_grant_role_id,
- user_id=self.user_foo['id'],
- domain_id=self.domain_default['id'],
- role_id='member')
-
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.check_grant_role_id,
- group_id=uuid.uuid4().hex,
- domain_id=self.domain_default['id'],
- role_id='member')
-
- def test_del_role_assignment_by_project_not_found(self):
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'],
- role_id='member')
-
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- group_id=uuid.uuid4().hex,
- project_id=self.tenant_baz['id'],
- role_id='member')
-
- def test_del_role_assignment_by_domain_not_found(self):
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- user_id=self.user_foo['id'],
- domain_id=self.domain_default['id'],
- role_id='member')
-
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- group_id=uuid.uuid4().hex,
- domain_id=self.domain_default['id'],
- role_id='member')
-
- def test_get_and_remove_role_grant_by_group_and_project(self):
- new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(new_domain['id'], new_domain)
- new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- new_user = {'name': 'new_user', 'password': 'secret',
- 'enabled': True, 'domain_id': new_domain['id']}
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- project_id=self.tenant_bar['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(group_id=new_group['id'],
- project_id=self.tenant_bar['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- project_id=self.tenant_bar['id'])
- self.assertDictEqual(roles_ref[0], self.role_member)
-
- self.assignment_api.delete_grant(group_id=new_group['id'],
- project_id=self.tenant_bar['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- project_id=self.tenant_bar['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- group_id=new_group['id'],
- project_id=self.tenant_bar['id'],
- role_id='member')
-
- def test_get_and_remove_role_grant_by_group_and_domain(self):
- new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(new_domain['id'], new_domain)
- new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': new_domain['id']}
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
-
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
-
- self.assignment_api.create_grant(group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
-
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertDictEqual(roles_ref[0], self.role_member)
-
- self.assignment_api.delete_grant(group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
-
- def test_get_and_remove_correct_role_grant_from_a_mix(self):
- new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(new_domain['id'], new_domain)
- new_project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': new_domain['id']}
- self.resource_api.create_project(new_project['id'], new_project)
- new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- new_group2 = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
- new_group2 = self.identity_api.create_group(new_group2)
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': new_domain['id']}
- new_user = self.identity_api.create_user(new_user)
- new_user2 = {'name': 'new_user2', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': new_domain['id']}
- new_user2 = self.identity_api.create_user(new_user2)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- # First check we have no grants
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
- # Now add the grant we are going to test for, and some others as
- # well just to make sure we get back the right one
- self.assignment_api.create_grant(group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
-
- self.assignment_api.create_grant(group_id=new_group2['id'],
- domain_id=new_domain['id'],
- role_id=self.role_admin['id'])
- self.assignment_api.create_grant(user_id=new_user2['id'],
- domain_id=new_domain['id'],
- role_id=self.role_admin['id'])
- self.assignment_api.create_grant(group_id=new_group['id'],
- project_id=new_project['id'],
- role_id=self.role_admin['id'])
-
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertDictEqual(roles_ref[0], self.role_member)
-
- self.assignment_api.delete_grant(group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
-
- def test_get_and_remove_role_grant_by_user_and_domain(self):
- new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(new_domain['id'], new_domain)
- new_user = {'name': 'new_user', 'password': 'secret',
- 'enabled': True, 'domain_id': new_domain['id']}
- new_user = self.identity_api.create_user(new_user)
- roles_ref = self.assignment_api.list_grants(
- user_id=new_user['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(user_id=new_user['id'],
- domain_id=new_domain['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- user_id=new_user['id'],
- domain_id=new_domain['id'])
- self.assertDictEqual(roles_ref[0], self.role_member)
-
- self.assignment_api.delete_grant(user_id=new_user['id'],
- domain_id=new_domain['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- user_id=new_user['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- user_id=new_user['id'],
- domain_id=new_domain['id'],
- role_id='member')
-
- def test_get_and_remove_role_grant_by_group_and_cross_domain(self):
- group1_domain1_role = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.role_api.create_role(group1_domain1_role['id'],
- group1_domain1_role)
- group1_domain2_role = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.role_api.create_role(group1_domain2_role['id'],
- group1_domain2_role)
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- group1 = {'domain_id': domain1['id'], 'name': uuid.uuid4().hex}
- group1 = self.identity_api.create_group(group1)
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertEqual(0, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain2['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=group1_domain1_role['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain2['id'],
- role_id=group1_domain2_role['id'])
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertDictEqual(roles_ref[0], group1_domain1_role)
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain2['id'])
- self.assertDictEqual(roles_ref[0], group1_domain2_role)
-
- self.assignment_api.delete_grant(group_id=group1['id'],
- domain_id=domain2['id'],
- role_id=group1_domain2_role['id'])
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain2['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- group_id=group1['id'],
- domain_id=domain2['id'],
- role_id=group1_domain2_role['id'])
-
- def test_get_and_remove_role_grant_by_user_and_cross_domain(self):
- user1_domain1_role = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.role_api.create_role(user1_domain1_role['id'], user1_domain1_role)
- user1_domain2_role = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.role_api.create_role(user1_domain2_role['id'], user1_domain2_role)
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'password': uuid.uuid4().hex, 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertEqual(0, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain2['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=user1_domain1_role['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain2['id'],
- role_id=user1_domain2_role['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertDictEqual(roles_ref[0], user1_domain1_role)
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain2['id'])
- self.assertDictEqual(roles_ref[0], user1_domain2_role)
-
- self.assignment_api.delete_grant(user_id=user1['id'],
- domain_id=domain2['id'],
- role_id=user1_domain2_role['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain2['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- user_id=user1['id'],
- domain_id=domain2['id'],
- role_id=user1_domain2_role['id'])
-
- def test_role_grant_by_group_and_cross_domain_project(self):
- role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role1['id'], role1)
- role2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role2['id'], role2)
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'enabled': True}
- group1 = self.identity_api.create_group(group1)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain2['id']}
- self.resource_api.create_project(project1['id'], project1)
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role2['id'])
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
-
- roles_ref_ids = []
- for ref in roles_ref:
- roles_ref_ids.append(ref['id'])
- self.assertIn(role1['id'], roles_ref_ids)
- self.assertIn(role2['id'], roles_ref_ids)
-
- self.assignment_api.delete_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- self.assertDictEqual(roles_ref[0], role2)
-
- def test_role_grant_by_user_and_cross_domain_project(self):
- role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role1['id'], role1)
- role2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role2['id'], role2)
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'password': uuid.uuid4().hex, 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain2['id']}
- self.resource_api.create_project(project1['id'], project1)
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role2['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
-
- roles_ref_ids = []
- for ref in roles_ref:
- roles_ref_ids.append(ref['id'])
- self.assertIn(role1['id'], roles_ref_ids)
- self.assertIn(role2['id'], roles_ref_ids)
-
- self.assignment_api.delete_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- self.assertDictEqual(roles_ref[0], role2)
-
- def test_delete_user_grant_no_user(self):
- # Can delete a grant where the user doesn't exist.
- role_id = uuid.uuid4().hex
- role = {'id': role_id, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role_id, role)
-
- user_id = uuid.uuid4().hex
-
- self.assignment_api.create_grant(role_id, user_id=user_id,
- project_id=self.tenant_bar['id'])
-
- self.assignment_api.delete_grant(role_id, user_id=user_id,
- project_id=self.tenant_bar['id'])
-
- def test_delete_group_grant_no_group(self):
- # Can delete a grant where the group doesn't exist.
- role_id = uuid.uuid4().hex
- role = {'id': role_id, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role_id, role)
-
- group_id = uuid.uuid4().hex
-
- self.assignment_api.create_grant(role_id, group_id=group_id,
- project_id=self.tenant_bar['id'])
-
- self.assignment_api.delete_grant(role_id, group_id=group_id,
- project_id=self.tenant_bar['id'])
-
- def test_grant_crud_throws_exception_if_invalid_role(self):
- """Ensure RoleNotFound thrown if role does not exist."""
-
- def assert_role_not_found_exception(f, **kwargs):
- self.assertRaises(exception.RoleNotFound, f,
- role_id=uuid.uuid4().hex, **kwargs)
-
- user = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex, 'enabled': True}
- user_resp = self.identity_api.create_user(user)
- group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True}
- group_resp = self.identity_api.create_group(group)
- project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- project_resp = self.resource_api.create_project(project['id'], project)
-
- for manager_call in [self.assignment_api.create_grant,
- self.assignment_api.get_grant,
- self.assignment_api.delete_grant]:
- assert_role_not_found_exception(
- manager_call,
- user_id=user_resp['id'], project_id=project_resp['id'])
- assert_role_not_found_exception(
- manager_call,
- group_id=group_resp['id'], project_id=project_resp['id'])
- assert_role_not_found_exception(
- manager_call,
- user_id=user_resp['id'], domain_id=DEFAULT_DOMAIN_ID)
- assert_role_not_found_exception(
- manager_call,
- group_id=group_resp['id'], domain_id=DEFAULT_DOMAIN_ID)
-
- def test_multi_role_grant_by_user_group_on_project_domain(self):
- role_list = []
- for _ in range(10):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'password': uuid.uuid4().hex, 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'enabled': True}
- group1 = self.identity_api.create_group(group1)
- group2 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'enabled': True}
- group2 = self.identity_api.create_group(group2)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project1['id'], project1)
-
- self.identity_api.add_user_to_group(user1['id'],
- group1['id'])
- self.identity_api.add_user_to_group(user1['id'],
- group2['id'])
-
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role_list[1]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role_list[2]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role_list[3]['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role_list[4]['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role_list[5]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role_list[6]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role_list[7]['id'])
- roles_ref = self.assignment_api.list_grants(user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertEqual(2, len(roles_ref))
- self.assertIn(role_list[0], roles_ref)
- self.assertIn(role_list[1], roles_ref)
- roles_ref = self.assignment_api.list_grants(group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertEqual(2, len(roles_ref))
- self.assertIn(role_list[2], roles_ref)
- self.assertIn(role_list[3], roles_ref)
- roles_ref = self.assignment_api.list_grants(user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(2, len(roles_ref))
- self.assertIn(role_list[4], roles_ref)
- self.assertIn(role_list[5], roles_ref)
- roles_ref = self.assignment_api.list_grants(group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(2, len(roles_ref))
- self.assertIn(role_list[6], roles_ref)
- self.assertIn(role_list[7], roles_ref)
-
- # Now test the alternate way of getting back lists of grants,
- # where user and group roles are combined. These should match
- # the above results.
- combined_list = self.assignment_api.get_roles_for_user_and_project(
- user1['id'], project1['id'])
- self.assertEqual(4, len(combined_list))
- self.assertIn(role_list[4]['id'], combined_list)
- self.assertIn(role_list[5]['id'], combined_list)
- self.assertIn(role_list[6]['id'], combined_list)
- self.assertIn(role_list[7]['id'], combined_list)
-
- combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
- user1['id'], domain1['id'])
- self.assertEqual(4, len(combined_role_list))
- self.assertIn(role_list[0]['id'], combined_role_list)
- self.assertIn(role_list[1]['id'], combined_role_list)
- self.assertIn(role_list[2]['id'], combined_role_list)
- self.assertIn(role_list[3]['id'], combined_role_list)
-
- def test_multi_group_grants_on_project_domain(self):
- """Test multiple group roles for user on project and domain.
-
- Test Plan:
-
- - Create 6 roles
- - Create a domain, with a project, user and two groups
- - Make the user a member of both groups
- - Check no roles yet exit
- - Assign a role to each user and both groups on both the
- project and domain
- - Get a list of effective roles for the user on both the
- project and domain, checking we get back the correct three
- roles
-
- """
- role_list = []
- for _ in range(6):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'password': uuid.uuid4().hex, 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'enabled': True}
- group1 = self.identity_api.create_group(group1)
- group2 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'enabled': True}
- group2 = self.identity_api.create_group(group2)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project1['id'], project1)
-
- self.identity_api.add_user_to_group(user1['id'],
- group1['id'])
- self.identity_api.add_user_to_group(user1['id'],
- group2['id'])
-
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role_list[1]['id'])
- self.assignment_api.create_grant(group_id=group2['id'],
- domain_id=domain1['id'],
- role_id=role_list[2]['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role_list[3]['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role_list[4]['id'])
- self.assignment_api.create_grant(group_id=group2['id'],
- project_id=project1['id'],
- role_id=role_list[5]['id'])
-
- # Read by the roles, ensuring we get the correct 3 roles for
- # both project and domain
- combined_list = self.assignment_api.get_roles_for_user_and_project(
- user1['id'], project1['id'])
- self.assertEqual(3, len(combined_list))
- self.assertIn(role_list[3]['id'], combined_list)
- self.assertIn(role_list[4]['id'], combined_list)
- self.assertIn(role_list[5]['id'], combined_list)
-
- combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
- user1['id'], domain1['id'])
- self.assertEqual(3, len(combined_role_list))
- self.assertIn(role_list[0]['id'], combined_role_list)
- self.assertIn(role_list[1]['id'], combined_role_list)
- self.assertIn(role_list[2]['id'], combined_role_list)
-
- def test_delete_role_with_user_and_group_grants(self):
- role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role1['id'], role1)
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project1['id'], project1)
- user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'password': uuid.uuid4().hex, 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'enabled': True}
- group1 = self.identity_api.create_group(group1)
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role1['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertEqual(1, len(roles_ref))
- self.role_api.delete_role(role1['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(0, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertEqual(0, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertEqual(0, len(roles_ref))
-
- def test_delete_user_with_group_project_domain_links(self):
- role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role1['id'], role1)
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project1['id'], project1)
- user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'password': uuid.uuid4().hex, 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'enabled': True}
- group1 = self.identity_api.create_group(group1)
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain1['id'],
- role_id=role1['id'])
- self.identity_api.add_user_to_group(user_id=user1['id'],
- group_id=group1['id'])
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- user_id=user1['id'],
- domain_id=domain1['id'])
- self.assertEqual(1, len(roles_ref))
- self.identity_api.check_user_in_group(
- user_id=user1['id'],
- group_id=group1['id'])
- self.identity_api.delete_user(user1['id'])
- self.assertRaises(exception.NotFound,
- self.identity_api.check_user_in_group,
- user1['id'],
- group1['id'])
-
- def test_delete_group_with_user_project_domain_links(self):
- role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role1['id'], role1)
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project1['id'], project1)
- user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'password': uuid.uuid4().hex, 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
- 'enabled': True}
- group1 = self.identity_api.create_group(group1)
-
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain1['id'],
- role_id=role1['id'])
- self.identity_api.add_user_to_group(user_id=user1['id'],
- group_id=group1['id'])
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- project_id=project1['id'])
- self.assertEqual(1, len(roles_ref))
- roles_ref = self.assignment_api.list_grants(
- group_id=group1['id'],
- domain_id=domain1['id'])
- self.assertEqual(1, len(roles_ref))
- self.identity_api.check_user_in_group(
- user_id=user1['id'],
- group_id=group1['id'])
- self.identity_api.delete_group(group1['id'])
- self.identity_api.get_user(user1['id'])
-
- def test_list_role_assignment_by_domain(self):
- """Test listing of role assignment filtered by domain."""
-
- test_plan = {
- # A domain with 3 users, 1 group, a spoiler domain and 2 roles.
- 'entities': {'domains': [{'users': 3, 'groups': 1}, 1],
- 'roles': 2},
- # Users 1 & 2 are in the group
- 'group_memberships': [{'group': 0, 'users': [1, 2]}],
- # Assign a role for user 0 and the group
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'group': 0, 'role': 1, 'domain': 0}],
- 'tests': [
- # List all effective assignments for domain[0].
- # Should get one direct user role and user roles for each of
- # the users in the group.
- {'params': {'domain': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 1, 'role': 1, 'domain': 0,
- 'indirect': {'group': 0}},
- {'user': 2, 'role': 1, 'domain': 0,
- 'indirect': {'group': 0}}
- ]},
- # Using domain[1] should return nothing
- {'params': {'domain': 1, 'effective': True},
- 'results': []},
- ]
- }
- self.execute_assignment_test_plan(test_plan)
-
- def test_list_role_assignment_by_user_with_domain_group_roles(self):
- """Test listing assignments by user, with group roles on a domain."""
-
- test_plan = {
- # A domain with 3 users, 3 groups, a spoiler domain
- # plus 3 roles.
- 'entities': {'domains': [{'users': 3, 'groups': 3}, 1],
- 'roles': 3},
- # Users 1 & 2 are in the group 0, User 1 also in group 1
- 'group_memberships': [{'group': 0, 'users': [0, 1]},
- {'group': 1, 'users': [0]}],
- 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
- {'group': 0, 'role': 1, 'domain': 0},
- {'group': 1, 'role': 2, 'domain': 0},
- # ...and two spoiler assignments
- {'user': 1, 'role': 1, 'domain': 0},
- {'group': 2, 'role': 2, 'domain': 0}],
- 'tests': [
- # List all effective assignments for user[0].
- # Should get one direct user role and a user roles for each of
- # groups 0 and 1
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'domain': 0,
- 'indirect': {'group': 0}},
- {'user': 0, 'role': 2, 'domain': 0,
- 'indirect': {'group': 1}}
- ]},
- # Adding domain[0] as a filter should return the same data
- {'params': {'user': 0, 'domain': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'domain': 0},
- {'user': 0, 'role': 1, 'domain': 0,
- 'indirect': {'group': 0}},
- {'user': 0, 'role': 2, 'domain': 0,
- 'indirect': {'group': 1}}
- ]},
- # Using domain[1] should return nothing
- {'params': {'user': 0, 'domain': 1, 'effective': True},
- 'results': []},
- # Using user[2] should return nothing
- {'params': {'user': 2, 'domain': 0, 'effective': True},
- 'results': []},
- ]
- }
- self.execute_assignment_test_plan(test_plan)
-
- def test_delete_domain_with_user_group_project_links(self):
- # TODO(chungg):add test case once expected behaviour defined
- pass
-
- def test_add_user_to_project(self):
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- self.user_foo['id'])
- tenants = self.assignment_api.list_projects_for_user(
- self.user_foo['id'])
- self.assertIn(self.tenant_baz, tenants)
-
- def test_add_user_to_project_missing_default_role(self):
- self.role_api.delete_role(CONF.member_role_id)
- self.assertRaises(exception.RoleNotFound,
- self.role_api.get_role,
- CONF.member_role_id)
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- self.user_foo['id'])
- tenants = (
- self.assignment_api.list_projects_for_user(self.user_foo['id']))
- self.assertIn(self.tenant_baz, tenants)
- default_role = self.role_api.get_role(CONF.member_role_id)
- self.assertIsNotNone(default_role)
-
- def test_add_user_to_project_404(self):
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.add_user_to_project,
- uuid.uuid4().hex,
- self.user_foo['id'])
-
- def test_add_user_to_project_no_user(self):
- # If add_user_to_project and the user doesn't exist, then
- # no error.
- user_id_not_exist = uuid.uuid4().hex
- self.assignment_api.add_user_to_project(self.tenant_bar['id'],
- user_id_not_exist)
-
- def test_remove_user_from_project(self):
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- self.user_foo['id'])
- self.assignment_api.remove_user_from_project(self.tenant_baz['id'],
- self.user_foo['id'])
- tenants = self.assignment_api.list_projects_for_user(
- self.user_foo['id'])
- self.assertNotIn(self.tenant_baz, tenants)
-
- def test_remove_user_from_project_race_delete_role(self):
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- self.user_foo['id'])
- self.assignment_api.add_role_to_user_and_project(
- tenant_id=self.tenant_baz['id'],
- user_id=self.user_foo['id'],
- role_id=self.role_other['id'])
-
- # Mock a race condition, delete a role after
- # get_roles_for_user_and_project() is called in
- # remove_user_from_project().
- roles = self.assignment_api.get_roles_for_user_and_project(
- self.user_foo['id'], self.tenant_baz['id'])
- self.role_api.delete_role(self.role_other['id'])
- self.assignment_api.get_roles_for_user_and_project = mock.Mock(
- return_value=roles)
- self.assignment_api.remove_user_from_project(self.tenant_baz['id'],
- self.user_foo['id'])
- tenants = self.assignment_api.list_projects_for_user(
- self.user_foo['id'])
- self.assertNotIn(self.tenant_baz, tenants)
-
- def test_remove_user_from_project_404(self):
- self.assertRaises(exception.ProjectNotFound,
- self.assignment_api.remove_user_from_project,
- uuid.uuid4().hex,
- self.user_foo['id'])
-
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.remove_user_from_project,
- self.tenant_bar['id'],
- uuid.uuid4().hex)
-
- self.assertRaises(exception.NotFound,
- self.assignment_api.remove_user_from_project,
- self.tenant_baz['id'],
- self.user_foo['id'])
-
- def test_list_user_project_ids_404(self):
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.list_projects_for_user,
- uuid.uuid4().hex)
-
- def test_update_project_404(self):
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.update_project,
- uuid.uuid4().hex,
- dict())
-
- def test_delete_project_404(self):
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.delete_project,
- uuid.uuid4().hex)
-
- def test_update_user_404(self):
- user_id = uuid.uuid4().hex
- self.assertRaises(exception.UserNotFound,
- self.identity_api.update_user,
- user_id,
- {'id': user_id,
- 'domain_id': DEFAULT_DOMAIN_ID})
-
- def test_delete_user_with_project_association(self):
- user = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex}
- user = self.identity_api.create_user(user)
- self.assignment_api.add_user_to_project(self.tenant_bar['id'],
- user['id'])
- self.identity_api.delete_user(user['id'])
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.list_projects_for_user,
- user['id'])
-
- def test_delete_user_with_project_roles(self):
- user = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex}
- user = self.identity_api.create_user(user)
- self.assignment_api.add_role_to_user_and_project(
- user['id'],
- self.tenant_bar['id'],
- self.role_member['id'])
- self.identity_api.delete_user(user['id'])
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.list_projects_for_user,
- user['id'])
-
- def test_delete_user_404(self):
- self.assertRaises(exception.UserNotFound,
- self.identity_api.delete_user,
- uuid.uuid4().hex)
-
- def test_delete_role_404(self):
- self.assertRaises(exception.RoleNotFound,
- self.role_api.delete_role,
- uuid.uuid4().hex)
-
- def test_create_update_delete_unicode_project(self):
- unicode_project_name = u'name \u540d\u5b57'
- project = {'id': uuid.uuid4().hex,
- 'name': unicode_project_name,
- 'description': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id}
- self.resource_api.create_project(project['id'], project)
- self.resource_api.update_project(project['id'], project)
- self.resource_api.delete_project(project['id'])
-
- def test_create_project_with_no_enabled_field(self):
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex.lower(),
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project(ref['id'], ref)
-
- project = self.resource_api.get_project(ref['id'])
- self.assertIs(project['enabled'], True)
-
- def test_create_project_long_name_fails(self):
- tenant = {'id': 'fake1', 'name': 'a' * 65,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- tenant['id'],
- tenant)
-
- def test_create_project_blank_name_fails(self):
- tenant = {'id': 'fake1', 'name': '',
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- tenant['id'],
- tenant)
-
- def test_create_project_invalid_name_fails(self):
- tenant = {'id': 'fake1', 'name': None,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- tenant['id'],
- tenant)
- tenant = {'id': 'fake1', 'name': 123,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- tenant['id'],
- tenant)
-
- def test_update_project_blank_name_fails(self):
- tenant = {'id': 'fake1', 'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project('fake1', tenant)
- tenant['name'] = ''
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- tenant['id'],
- tenant)
-
- def test_update_project_long_name_fails(self):
- tenant = {'id': 'fake1', 'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project('fake1', tenant)
- tenant['name'] = 'a' * 65
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- tenant['id'],
- tenant)
-
- def test_update_project_invalid_name_fails(self):
- tenant = {'id': 'fake1', 'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project('fake1', tenant)
- tenant['name'] = None
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- tenant['id'],
- tenant)
-
- tenant['name'] = 123
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- tenant['id'],
- tenant)
-
- def test_create_user_long_name_fails(self):
- user = {'name': 'a' * 256,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- def test_create_user_blank_name_fails(self):
- user = {'name': '',
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- def test_create_user_missed_password(self):
- user = {'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- user = self.identity_api.create_user(user)
- self.identity_api.get_user(user['id'])
- # Make sure the user is not allowed to login
- # with a password that is empty string or None
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=user['id'],
- password='')
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=user['id'],
- password=None)
-
- def test_create_user_none_password(self):
- user = {'name': 'fake1', 'password': None,
- 'domain_id': DEFAULT_DOMAIN_ID}
- user = self.identity_api.create_user(user)
- self.identity_api.get_user(user['id'])
- # Make sure the user is not allowed to login
- # with a password that is empty string or None
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=user['id'],
- password='')
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=user['id'],
- password=None)
-
- def test_create_user_invalid_name_fails(self):
- user = {'name': None,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- user = {'name': 123,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- def test_update_project_invalid_enabled_type_string(self):
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'enabled': True,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertEqual(True, project_ref['enabled'])
-
- # Strings are not valid boolean values
- project['enabled'] = "false"
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- project['id'],
- project)
-
- def test_create_project_invalid_enabled_type_string(self):
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- # invalid string value
- 'enabled': "true"}
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'],
- project)
-
- def test_create_project_invalid_domain_id(self):
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': uuid.uuid4().hex,
- 'enabled': True}
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.create_project,
- project['id'],
- project)
-
- def test_create_user_invalid_enabled_type_string(self):
- user = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex,
- # invalid string value
- 'enabled': "true"}
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
-
- def test_update_user_long_name_fails(self):
- user = {'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- user = self.identity_api.create_user(user)
- user['name'] = 'a' * 256
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- def test_update_user_blank_name_fails(self):
- user = {'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- user = self.identity_api.create_user(user)
- user['name'] = ''
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- def test_update_user_invalid_name_fails(self):
- user = {'name': 'fake1',
- 'domain_id': DEFAULT_DOMAIN_ID}
- user = self.identity_api.create_user(user)
-
- user['name'] = None
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- user['name'] = 123
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- def test_list_users(self):
- users = self.identity_api.list_users(
- domain_scope=self._set_domain_scope(DEFAULT_DOMAIN_ID))
- self.assertEqual(len(default_fixtures.USERS), len(users))
- user_ids = set(user['id'] for user in users)
- expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
- for user in default_fixtures.USERS)
- for user_ref in users:
- self.assertNotIn('password', user_ref)
- self.assertEqual(expected_user_ids, user_ids)
-
- def test_list_groups(self):
- group1 = {
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'name': uuid.uuid4().hex}
- group2 = {
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'name': uuid.uuid4().hex}
- group1 = self.identity_api.create_group(group1)
- group2 = self.identity_api.create_group(group2)
- groups = self.identity_api.list_groups(
- domain_scope=self._set_domain_scope(DEFAULT_DOMAIN_ID))
- self.assertEqual(2, len(groups))
- group_ids = []
- for group in groups:
- group_ids.append(group.get('id'))
- self.assertIn(group1['id'], group_ids)
- self.assertIn(group2['id'], group_ids)
-
- def test_list_domains(self):
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- self.resource_api.create_domain(domain2['id'], domain2)
- domains = self.resource_api.list_domains()
- self.assertEqual(3, len(domains))
- domain_ids = []
- for domain in domains:
- domain_ids.append(domain.get('id'))
- self.assertIn(DEFAULT_DOMAIN_ID, domain_ids)
- self.assertIn(domain1['id'], domain_ids)
- self.assertIn(domain2['id'], domain_ids)
-
- def test_list_projects(self):
- projects = self.resource_api.list_projects()
- self.assertEqual(4, len(projects))
- project_ids = []
- for project in projects:
- project_ids.append(project.get('id'))
- self.assertIn(self.tenant_bar['id'], project_ids)
- self.assertIn(self.tenant_baz['id'], project_ids)
-
- def test_list_projects_with_multiple_filters(self):
- # Create a project
- project = {'id': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID,
- 'name': uuid.uuid4().hex, 'description': uuid.uuid4().hex,
- 'enabled': True, 'parent_id': None, 'is_domain': False}
- self.resource_api.create_project(project['id'], project)
-
- # Build driver hints with the project's name and inexistent description
- hints = driver_hints.Hints()
- hints.add_filter('name', project['name'])
- hints.add_filter('description', uuid.uuid4().hex)
-
- # Retrieve projects based on hints and check an empty list is returned
- projects = self.resource_api.list_projects(hints)
- self.assertEqual([], projects)
-
- # Build correct driver hints
- hints = driver_hints.Hints()
- hints.add_filter('name', project['name'])
- hints.add_filter('description', project['description'])
-
- # Retrieve projects based on hints
- projects = self.resource_api.list_projects(hints)
-
- # Check that the returned list contains only the first project
- self.assertEqual(1, len(projects))
- self.assertEqual(project, projects[0])
-
- def test_list_projects_for_domain(self):
- project_ids = ([x['id'] for x in
- self.resource_api.list_projects_in_domain(
- DEFAULT_DOMAIN_ID)])
- self.assertEqual(4, len(project_ids))
- self.assertIn(self.tenant_bar['id'], project_ids)
- self.assertIn(self.tenant_baz['id'], project_ids)
- self.assertIn(self.tenant_mtu['id'], project_ids)
- self.assertIn(self.tenant_service['id'], project_ids)
-
- @unit.skip_if_no_multiple_domains_support
- def test_list_projects_for_alternate_domain(self):
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project1['id'], project1)
- project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project2['id'], project2)
- project_ids = ([x['id'] for x in
- self.resource_api.list_projects_in_domain(
- domain1['id'])])
- self.assertEqual(2, len(project_ids))
- self.assertIn(project1['id'], project_ids)
- self.assertIn(project2['id'], project_ids)
-
- def _create_projects_hierarchy(self, hierarchy_size=2,
- domain_id=DEFAULT_DOMAIN_ID,
- is_domain=False):
- """Creates a project hierarchy with specified size.
-
- :param hierarchy_size: the desired hierarchy size, default is 2 -
- a project with one child.
- :param domain_id: domain where the projects hierarchy will be created.
- :param is_domain: if the hierarchy will have the is_domain flag active
- or not.
-
- :returns projects: a list of the projects in the created hierarchy.
-
- """
- project_id = uuid.uuid4().hex
- project = {'id': project_id,
- 'description': '',
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None,
- 'domain_id': domain_id,
- 'is_domain': is_domain}
- self.resource_api.create_project(project_id, project)
-
- projects = [project]
- for i in range(1, hierarchy_size):
- new_project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': project_id,
- 'is_domain': is_domain}
- new_project['domain_id'] = domain_id
-
- self.resource_api.create_project(new_project['id'], new_project)
- projects.append(new_project)
- project_id = new_project['id']
-
- return projects
-
- @unit.skip_if_no_multiple_domains_support
- def test_create_domain_with_project_api(self):
- project_id = uuid.uuid4().hex
- project = {'id': project_id,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None,
- 'is_domain': True}
- ref = self.resource_api.create_project(project['id'], project)
- self.assertTrue(ref['is_domain'])
- self.assertEqual(DEFAULT_DOMAIN_ID, ref['domain_id'])
-
- @unit.skip_if_no_multiple_domains_support
- @test_utils.wip('waiting for projects acting as domains implementation')
- def test_is_domain_sub_project_has_parent_domain_id(self):
- project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None,
- 'is_domain': True}
- self.resource_api.create_project(project['id'], project)
-
- sub_project_id = uuid.uuid4().hex
- sub_project = {'id': sub_project_id,
- 'description': '',
- 'domain_id': project['id'],
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': project['id'],
- 'is_domain': True}
- ref = self.resource_api.create_project(sub_project['id'], sub_project)
- self.assertTrue(ref['is_domain'])
- self.assertEqual(project['id'], ref['parent_id'])
- self.assertEqual(project['id'], ref['domain_id'])
-
- @unit.skip_if_no_multiple_domains_support
- @test_utils.wip('waiting for projects acting as domains implementation')
- def test_delete_domain_with_project_api(self):
- project_id = uuid.uuid4().hex
- project = {'id': project_id,
- 'description': '',
- 'domain_id': None,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None,
- 'is_domain': True}
- self.resource_api.create_project(project['id'], project)
-
- # Try to delete is_domain project that is enabled
- self.assertRaises(exception.ValidationError,
- self.resource_api.delete_project,
- project['id'])
-
- # Disable the project
- project['enabled'] = False
- self.resource_api.update_project(project['id'], project)
-
- # Successfuly delete the project
- self.resource_api.delete_project(project['id'])
-
- @unit.skip_if_no_multiple_domains_support
- @test_utils.wip('waiting for projects acting as domains implementation')
- def test_create_domain_under_regular_project_hierarchy_fails(self):
- # Creating a regular project hierarchy. Projects acting as domains
- # can't have a parent that is a regular project.
- projects_hierarchy = self._create_projects_hierarchy()
- parent = projects_hierarchy[1]
- project_id = uuid.uuid4().hex
- project = {'id': project_id,
- 'description': '',
- 'domain_id': parent['id'],
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': parent['id'],
- 'is_domain': True}
-
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'], project)
-
- @unit.skip_if_no_multiple_domains_support
- @test_utils.wip('waiting for projects acting as domains implementation')
- def test_create_project_under_domain_hierarchy(self):
- projects_hierarchy = self._create_projects_hierarchy(is_domain=True)
- parent = projects_hierarchy[1]
- project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': parent['id'],
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': parent['id'],
- 'is_domain': False}
-
- ref = self.resource_api.create_project(project['id'], project)
- self.assertFalse(ref['is_domain'])
- self.assertEqual(parent['id'], ref['parent_id'])
- self.assertEqual(parent['id'], ref['domain_id'])
-
- def test_create_project_without_is_domain_flag(self):
- project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None}
-
- ref = self.resource_api.create_project(project['id'], project)
- # The is_domain flag should be False by default
- self.assertFalse(ref['is_domain'])
-
- def test_create_is_domain_project(self):
- project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None,
- 'is_domain': True}
-
- ref = self.resource_api.create_project(project['id'], project)
- self.assertTrue(ref['is_domain'])
-
- @test_utils.wip('waiting for projects acting as domains implementation')
- def test_create_project_with_parent_id_and_without_domain_id(self):
- project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': None,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None}
- self.resource_api.create_project(project['id'], project)
-
- sub_project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': project['id']}
- ref = self.resource_api.create_project(sub_project['id'], sub_project)
-
- # The domain_id should be set to the parent domain_id
- self.assertEqual(project['domain_id'], ref['domain_id'])
-
- @test_utils.wip('waiting for projects acting as domains implementation')
- def test_create_project_with_domain_id_and_without_parent_id(self):
- project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': None,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None}
- self.resource_api.create_project(project['id'], project)
-
- sub_project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'enabled': True,
- 'domain_id': project['id'],
- 'name': uuid.uuid4().hex}
- ref = self.resource_api.create_project(sub_project['id'], sub_project)
-
- # The parent_id should be set to the domain_id
- self.assertEqual(ref['parent_id'], project['id'])
-
- def test_check_leaf_projects(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
- leaf_project = projects_hierarchy[1]
-
- self.assertFalse(self.resource_api.is_leaf_project(
- root_project['id']))
- self.assertTrue(self.resource_api.is_leaf_project(
- leaf_project['id']))
-
- # Delete leaf_project
- self.resource_api.delete_project(leaf_project['id'])
-
- # Now, root_project should be leaf
- self.assertTrue(self.resource_api.is_leaf_project(
- root_project['id']))
-
- def test_list_projects_in_subtree(self):
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- project1 = projects_hierarchy[0]
- project2 = projects_hierarchy[1]
- project3 = projects_hierarchy[2]
- project4 = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': project2['id'],
- 'is_domain': False}
- self.resource_api.create_project(project4['id'], project4)
-
- subtree = self.resource_api.list_projects_in_subtree(project1['id'])
- self.assertEqual(3, len(subtree))
- self.assertIn(project2, subtree)
- self.assertIn(project3, subtree)
- self.assertIn(project4, subtree)
-
- subtree = self.resource_api.list_projects_in_subtree(project2['id'])
- self.assertEqual(2, len(subtree))
- self.assertIn(project3, subtree)
- self.assertIn(project4, subtree)
-
- subtree = self.resource_api.list_projects_in_subtree(project3['id'])
- self.assertEqual(0, len(subtree))
-
- def test_list_projects_in_subtree_with_circular_reference(self):
- project1_id = uuid.uuid4().hex
- project2_id = uuid.uuid4().hex
-
- project1 = {'id': project1_id,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex}
- self.resource_api.create_project(project1['id'], project1)
-
- project2 = {'id': project2_id,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': project1_id}
- self.resource_api.create_project(project2['id'], project2)
-
- project1['parent_id'] = project2_id # Adds cyclic reference
-
- # NOTE(dstanek): The manager does not allow parent_id to be updated.
- # Instead will directly use the driver to create the cyclic
- # reference.
- self.resource_api.driver.update_project(project1_id, project1)
-
- subtree = self.resource_api.list_projects_in_subtree(project1_id)
-
- # NOTE(dstanek): If a cyclic refence is detected the code bails
- # and returns None instead of falling into the infinite
- # recursion trap.
- self.assertIsNone(subtree)
-
- def test_list_projects_in_subtree_invalid_project_id(self):
- self.assertRaises(exception.ValidationError,
- self.resource_api.list_projects_in_subtree,
- None)
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.list_projects_in_subtree,
- uuid.uuid4().hex)
-
- def test_list_project_parents(self):
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- project1 = projects_hierarchy[0]
- project2 = projects_hierarchy[1]
- project3 = projects_hierarchy[2]
- project4 = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': project2['id'],
- 'is_domain': False}
- self.resource_api.create_project(project4['id'], project4)
-
- parents1 = self.resource_api.list_project_parents(project3['id'])
- self.assertEqual(2, len(parents1))
- self.assertIn(project1, parents1)
- self.assertIn(project2, parents1)
-
- parents2 = self.resource_api.list_project_parents(project4['id'])
- self.assertEqual(parents1, parents2)
-
- parents = self.resource_api.list_project_parents(project1['id'])
- self.assertEqual(0, len(parents))
-
- def test_list_project_parents_invalid_project_id(self):
- self.assertRaises(exception.ValidationError,
- self.resource_api.list_project_parents,
- None)
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.list_project_parents,
- uuid.uuid4().hex)
-
- def test_delete_project_with_role_assignments(self):
- tenant = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project(tenant['id'], tenant)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], tenant['id'], 'member')
- self.resource_api.delete_project(tenant['id'])
- self.assertRaises(exception.NotFound,
- self.resource_api.get_project,
- tenant['id'])
-
- def test_delete_role_check_role_grant(self):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- alt_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role['id'], role)
- self.role_api.create_role(alt_role['id'], alt_role)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'], role['id'])
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'], alt_role['id'])
- self.role_api.delete_role(role['id'])
- roles_ref = self.assignment_api.get_roles_for_user_and_project(
- self.user_foo['id'], self.tenant_bar['id'])
- self.assertNotIn(role['id'], roles_ref)
- self.assertIn(alt_role['id'], roles_ref)
-
- def test_create_project_doesnt_modify_passed_in_dict(self):
- new_project = {'id': 'tenant_id', 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- original_project = new_project.copy()
- self.resource_api.create_project('tenant_id', new_project)
- self.assertDictEqual(original_project, new_project)
-
- def test_create_user_doesnt_modify_passed_in_dict(self):
- new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
- original_user = new_user.copy()
- self.identity_api.create_user(new_user)
- self.assertDictEqual(original_user, new_user)
-
- def test_update_user_enable(self):
- user = {'name': 'fake1', 'enabled': True,
- 'domain_id': DEFAULT_DOMAIN_ID}
- user = self.identity_api.create_user(user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(True, user_ref['enabled'])
-
- user['enabled'] = False
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(user['enabled'], user_ref['enabled'])
-
- # If not present, enabled field should not be updated
- del user['enabled']
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(False, user_ref['enabled'])
-
- user['enabled'] = True
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(user['enabled'], user_ref['enabled'])
-
- del user['enabled']
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(True, user_ref['enabled'])
-
- # Integers are valid Python's booleans. Explicitly test it.
- user['enabled'] = 0
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(False, user_ref['enabled'])
-
- # Any integers other than 0 are interpreted as True
- user['enabled'] = -42
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(True, user_ref['enabled'])
-
- def test_update_user_name(self):
- user = {'name': uuid.uuid4().hex,
- 'enabled': True,
- 'domain_id': DEFAULT_DOMAIN_ID}
- user = self.identity_api.create_user(user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(user['name'], user_ref['name'])
-
- changed_name = user_ref['name'] + '_changed'
- user_ref['name'] = changed_name
- updated_user = self.identity_api.update_user(user_ref['id'], user_ref)
-
- # NOTE(dstanek): the SQL backend adds an 'extra' field containing a
- # dictionary of the extra fields in addition to the
- # fields in the object. For the details see:
- # SqlIdentity.test_update_project_returns_extra
- updated_user.pop('extra', None)
-
- self.assertDictEqual(user_ref, updated_user)
-
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertEqual(changed_name, user_ref['name'])
-
- def test_update_user_enable_fails(self):
- user = {'name': 'fake1', 'enabled': True,
- 'domain_id': DEFAULT_DOMAIN_ID}
- user = self.identity_api.create_user(user)
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(True, user_ref['enabled'])
-
- # Strings are not valid boolean values
- user['enabled'] = "false"
- self.assertRaises(exception.ValidationError,
- self.identity_api.update_user,
- user['id'],
- user)
-
- def test_update_project_enable(self):
- tenant = {'id': 'fake1', 'name': 'fake1', 'enabled': True,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project('fake1', tenant)
- tenant_ref = self.resource_api.get_project('fake1')
- self.assertEqual(True, tenant_ref['enabled'])
-
- tenant['enabled'] = False
- self.resource_api.update_project('fake1', tenant)
- tenant_ref = self.resource_api.get_project('fake1')
- self.assertEqual(tenant['enabled'], tenant_ref['enabled'])
-
- # If not present, enabled field should not be updated
- del tenant['enabled']
- self.resource_api.update_project('fake1', tenant)
- tenant_ref = self.resource_api.get_project('fake1')
- self.assertEqual(False, tenant_ref['enabled'])
-
- tenant['enabled'] = True
- self.resource_api.update_project('fake1', tenant)
- tenant_ref = self.resource_api.get_project('fake1')
- self.assertEqual(tenant['enabled'], tenant_ref['enabled'])
-
- del tenant['enabled']
- self.resource_api.update_project('fake1', tenant)
- tenant_ref = self.resource_api.get_project('fake1')
- self.assertEqual(True, tenant_ref['enabled'])
-
- def test_add_user_to_group(self):
- domain = self._get_domain_fixture()
- new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': domain['id']}
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- groups = self.identity_api.list_groups_for_user(new_user['id'])
-
- found = False
- for x in groups:
- if (x['id'] == new_group['id']):
- found = True
- self.assertTrue(found)
-
- def test_add_user_to_group_404(self):
- domain = self._get_domain_fixture()
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': domain['id']}
- new_user = self.identity_api.create_user(new_user)
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.add_user_to_group,
- new_user['id'],
- uuid.uuid4().hex)
-
- new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- self.assertRaises(exception.UserNotFound,
- self.identity_api.add_user_to_group,
- uuid.uuid4().hex,
- new_group['id'])
-
- self.assertRaises(exception.NotFound,
- self.identity_api.add_user_to_group,
- uuid.uuid4().hex,
- uuid.uuid4().hex)
-
- def test_check_user_in_group(self):
- domain = self._get_domain_fixture()
- new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': domain['id']}
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- self.identity_api.check_user_in_group(new_user['id'], new_group['id'])
-
- def test_create_invalid_domain_fails(self):
- new_group = {'domain_id': "doesnotexist", 'name': uuid.uuid4().hex}
- self.assertRaises(exception.DomainNotFound,
- self.identity_api.create_group,
- new_group)
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': "doesnotexist"}
- self.assertRaises(exception.DomainNotFound,
- self.identity_api.create_user,
- new_user)
-
- def test_check_user_not_in_group(self):
- new_group = {
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
-
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': DEFAULT_DOMAIN_ID}
- new_user = self.identity_api.create_user(new_user)
-
- self.assertRaises(exception.NotFound,
- self.identity_api.check_user_in_group,
- new_user['id'],
- new_group['id'])
-
- def test_check_user_in_group_404(self):
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': DEFAULT_DOMAIN_ID}
- new_user = self.identity_api.create_user(new_user)
-
- new_group = {
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
-
- self.assertRaises(exception.UserNotFound,
- self.identity_api.check_user_in_group,
- uuid.uuid4().hex,
- new_group['id'])
-
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.check_user_in_group,
- new_user['id'],
- uuid.uuid4().hex)
-
- self.assertRaises(exception.NotFound,
- self.identity_api.check_user_in_group,
- uuid.uuid4().hex,
- uuid.uuid4().hex)
-
- def test_list_users_in_group(self):
- domain = self._get_domain_fixture()
- new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- # Make sure we get an empty list back on a new group, not an error.
- user_refs = self.identity_api.list_users_in_group(new_group['id'])
- self.assertEqual([], user_refs)
- # Make sure we get the correct users back once they have been added
- # to the group.
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': domain['id']}
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- user_refs = self.identity_api.list_users_in_group(new_group['id'])
- found = False
- for x in user_refs:
- if (x['id'] == new_user['id']):
- found = True
- self.assertNotIn('password', x)
- self.assertTrue(found)
-
- def test_list_users_in_group_404(self):
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.list_users_in_group,
- uuid.uuid4().hex)
-
- def test_list_groups_for_user(self):
- domain = self._get_domain_fixture()
- test_groups = []
- test_users = []
- GROUP_COUNT = 3
- USER_COUNT = 2
-
- for x in range(0, USER_COUNT):
- new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': domain['id']}
- new_user = self.identity_api.create_user(new_user)
- test_users.append(new_user)
- positive_user = test_users[0]
- negative_user = test_users[1]
-
- for x in range(0, USER_COUNT):
- group_refs = self.identity_api.list_groups_for_user(
- test_users[x]['id'])
- self.assertEqual(0, len(group_refs))
-
- for x in range(0, GROUP_COUNT):
- before_count = x
- after_count = x + 1
- new_group = {'domain_id': domain['id'],
- 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- test_groups.append(new_group)
-
- # add the user to the group and ensure that the
- # group count increases by one for each
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(before_count, len(group_refs))
- self.identity_api.add_user_to_group(
- positive_user['id'],
- new_group['id'])
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(after_count, len(group_refs))
-
- # Make sure the group count for the unrelated user did not change
- group_refs = self.identity_api.list_groups_for_user(
- negative_user['id'])
- self.assertEqual(0, len(group_refs))
-
- # remove the user from each group and ensure that
- # the group count reduces by one for each
- for x in range(0, 3):
- before_count = GROUP_COUNT - x
- after_count = GROUP_COUNT - x - 1
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(before_count, len(group_refs))
- self.identity_api.remove_user_from_group(
- positive_user['id'],
- test_groups[x]['id'])
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(after_count, len(group_refs))
- # Make sure the group count for the unrelated user
- # did not change
- group_refs = self.identity_api.list_groups_for_user(
- negative_user['id'])
- self.assertEqual(0, len(group_refs))
-
- def test_remove_user_from_group(self):
- domain = self._get_domain_fixture()
- new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': domain['id']}
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
- groups = self.identity_api.list_groups_for_user(new_user['id'])
- self.assertIn(new_group['id'], [x['id'] for x in groups])
- self.identity_api.remove_user_from_group(new_user['id'],
- new_group['id'])
- groups = self.identity_api.list_groups_for_user(new_user['id'])
- self.assertNotIn(new_group['id'], [x['id'] for x in groups])
-
- def test_remove_user_from_group_404(self):
- domain = self._get_domain_fixture()
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': domain['id']}
- new_user = self.identity_api.create_user(new_user)
- new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.remove_user_from_group,
- new_user['id'],
- uuid.uuid4().hex)
-
- self.assertRaises(exception.UserNotFound,
- self.identity_api.remove_user_from_group,
- uuid.uuid4().hex,
- new_group['id'])
-
- self.assertRaises(exception.NotFound,
- self.identity_api.remove_user_from_group,
- uuid.uuid4().hex,
- uuid.uuid4().hex)
-
- def test_group_crud(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain['id'], domain)
- group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex}
- group = self.identity_api.create_group(group)
- group_ref = self.identity_api.get_group(group['id'])
- self.assertDictContainsSubset(group, group_ref)
-
- group['name'] = uuid.uuid4().hex
- self.identity_api.update_group(group['id'], group)
- group_ref = self.identity_api.get_group(group['id'])
- self.assertDictContainsSubset(group, group_ref)
-
- self.identity_api.delete_group(group['id'])
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.get_group,
- group['id'])
-
- def test_get_group_by_name(self):
- group_name = uuid.uuid4().hex
- group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': group_name}
- group = self.identity_api.create_group(group)
- spoiler = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
- self.identity_api.create_group(spoiler)
-
- group_ref = self.identity_api.get_group_by_name(
- group_name, DEFAULT_DOMAIN_ID)
- self.assertDictEqual(group_ref, group)
-
- def test_get_group_by_name_404(self):
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.get_group_by_name,
- uuid.uuid4().hex,
- DEFAULT_DOMAIN_ID)
-
- @unit.skip_if_cache_disabled('identity')
- def test_cache_layer_group_crud(self):
- group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
- group = self.identity_api.create_group(group)
- # cache the result
- group_ref = self.identity_api.get_group(group['id'])
- # delete the group bypassing identity api.
- domain_id, driver, entity_id = (
- self.identity_api._get_domain_driver_and_entity_id(group['id']))
- driver.delete_group(entity_id)
-
- self.assertEqual(group_ref, self.identity_api.get_group(group['id']))
- self.identity_api.get_group.invalidate(self.identity_api, group['id'])
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.get_group, group['id'])
-
- group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
- group = self.identity_api.create_group(group)
- # cache the result
- self.identity_api.get_group(group['id'])
- group['name'] = uuid.uuid4().hex
- group_ref = self.identity_api.update_group(group['id'], group)
- # after updating through identity api, get updated group
- self.assertDictContainsSubset(self.identity_api.get_group(group['id']),
- group_ref)
-
- def test_create_duplicate_group_name_fails(self):
- group1 = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
- group2 = {'domain_id': DEFAULT_DOMAIN_ID, 'name': group1['name']}
- group1 = self.identity_api.create_group(group1)
- self.assertRaises(exception.Conflict,
- self.identity_api.create_group,
- group2)
-
- def test_create_duplicate_group_name_in_different_domains(self):
- new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(new_domain['id'], new_domain)
- group1 = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex}
- group2 = {'domain_id': new_domain['id'], 'name': group1['name']}
- group1 = self.identity_api.create_group(group1)
- group2 = self.identity_api.create_group(group2)
-
- def test_move_group_between_domains(self):
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- group = {'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- group = self.identity_api.create_group(group)
- group['domain_id'] = domain2['id']
- self.identity_api.update_group(group['id'], group)
-
- def test_move_group_between_domains_with_clashing_names_fails(self):
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- # First, create a group in domain1
- group1 = {'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- group1 = self.identity_api.create_group(group1)
- # Now create a group in domain2 with a potentially clashing
- # name - which should work since we have domain separation
- group2 = {'name': group1['name'],
- 'domain_id': domain2['id']}
- group2 = self.identity_api.create_group(group2)
- # Now try and move group1 into the 2nd domain - which should
- # fail since the names clash
- group1['domain_id'] = domain2['id']
- self.assertRaises(exception.Conflict,
- self.identity_api.update_group,
- group1['id'],
- group1)
-
- @unit.skip_if_no_multiple_domains_support
- def test_project_crud(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- self.resource_api.create_domain(domain['id'], domain)
- project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
- self.resource_api.create_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictContainsSubset(project, project_ref)
-
- project['name'] = uuid.uuid4().hex
- self.resource_api.update_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictContainsSubset(project, project_ref)
-
- self.resource_api.delete_project(project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
-
- def test_domain_delete_hierarchy(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- self.resource_api.create_domain(domain['id'], domain)
-
- # Creating a root and a leaf project inside the domain
- projects_hierarchy = self._create_projects_hierarchy(
- domain_id=domain['id'])
- root_project = projects_hierarchy[0]
- leaf_project = projects_hierarchy[0]
-
- # Disable the domain
- domain['enabled'] = False
- self.resource_api.update_domain(domain['id'], domain)
-
- # Delete the domain
- self.resource_api.delete_domain(domain['id'])
-
- # Make sure the domain no longer exists
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
-
- # Make sure the root project no longer exists
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- root_project['id'])
-
- # Make sure the leaf project no longer exists
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- leaf_project['id'])
-
- def test_hierarchical_projects_crud(self):
- # create a hierarchy with just a root project (which is a leaf as well)
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=1)
- root_project1 = projects_hierarchy[0]
-
- # create a hierarchy with one root project and one leaf project
- projects_hierarchy = self._create_projects_hierarchy()
- root_project2 = projects_hierarchy[0]
- leaf_project = projects_hierarchy[1]
-
- # update description from leaf_project
- leaf_project['description'] = 'new description'
- self.resource_api.update_project(leaf_project['id'], leaf_project)
- proj_ref = self.resource_api.get_project(leaf_project['id'])
- self.assertDictEqual(proj_ref, leaf_project)
-
- # update the parent_id is not allowed
- leaf_project['parent_id'] = root_project1['id']
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.update_project,
- leaf_project['id'],
- leaf_project)
-
- # delete root_project1
- self.resource_api.delete_project(root_project1['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- root_project1['id'])
-
- # delete root_project2 is not allowed since it is not a leaf project
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.delete_project,
- root_project2['id'])
-
- def test_create_project_with_invalid_parent(self):
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'parent_id': 'fake',
- 'is_domain': False}
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.create_project,
- project['id'],
- project)
-
- @unit.skip_if_no_multiple_domains_support
- def test_create_leaf_project_with_different_domain(self):
- root_project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'parent_id': None,
- 'is_domain': False}
- self.resource_api.create_project(root_project['id'], root_project)
-
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- self.resource_api.create_domain(domain['id'], domain)
- leaf_project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': domain['id'],
- 'enabled': True,
- 'parent_id': root_project['id'],
- 'is_domain': False}
-
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- leaf_project['id'],
- leaf_project)
-
- def test_delete_hierarchical_leaf_project(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
- leaf_project = projects_hierarchy[1]
-
- self.resource_api.delete_project(leaf_project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- leaf_project['id'])
-
- self.resource_api.delete_project(root_project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- root_project['id'])
-
- def test_delete_hierarchical_not_leaf_project(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
-
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.delete_project,
- root_project['id'])
-
- def test_update_project_parent(self):
- projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
- project1 = projects_hierarchy[0]
- project2 = projects_hierarchy[1]
- project3 = projects_hierarchy[2]
-
- # project2 is the parent from project3
- self.assertEqual(project3.get('parent_id'), project2['id'])
-
- # try to update project3 parent to parent1
- project3['parent_id'] = project1['id']
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.update_project,
- project3['id'],
- project3)
-
- def test_create_project_under_disabled_one(self):
- project1 = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': False,
- 'parent_id': None,
- 'is_domain': False}
- self.resource_api.create_project(project1['id'], project1)
-
- project2 = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'parent_id': project1['id'],
- 'is_domain': False}
-
- # It's not possible to create a project under a disabled one in the
- # hierarchy
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project2['id'],
- project2)
-
- def test_disable_hierarchical_leaf_project(self):
- projects_hierarchy = self._create_projects_hierarchy()
- leaf_project = projects_hierarchy[1]
-
- leaf_project['enabled'] = False
- self.resource_api.update_project(leaf_project['id'], leaf_project)
-
- project_ref = self.resource_api.get_project(leaf_project['id'])
- self.assertEqual(project_ref['enabled'], leaf_project['enabled'])
-
- def test_disable_hierarchical_not_leaf_project(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
-
- root_project['enabled'] = False
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.update_project,
- root_project['id'],
- root_project)
-
- def test_enable_project_with_disabled_parent(self):
- projects_hierarchy = self._create_projects_hierarchy()
- root_project = projects_hierarchy[0]
- leaf_project = projects_hierarchy[1]
-
- # Disable leaf and root
- leaf_project['enabled'] = False
- self.resource_api.update_project(leaf_project['id'], leaf_project)
- root_project['enabled'] = False
- self.resource_api.update_project(root_project['id'], root_project)
-
- # Try to enable the leaf project, it's not possible since it has
- # a disabled parent
- leaf_project['enabled'] = True
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.update_project,
- leaf_project['id'],
- leaf_project)
-
- def _get_hierarchy_depth(self, project_id):
- return len(self.resource_api.list_project_parents(project_id)) + 1
-
- def test_check_hierarchy_depth(self):
- # First create a hierarchy with the max allowed depth
- projects_hierarchy = self._create_projects_hierarchy(
- CONF.max_project_tree_depth)
- leaf_project = projects_hierarchy[CONF.max_project_tree_depth - 1]
-
- depth = self._get_hierarchy_depth(leaf_project['id'])
- self.assertEqual(CONF.max_project_tree_depth, depth)
-
- # Creating another project in the hierarchy shouldn't be allowed
- project_id = uuid.uuid4().hex
- project = {
- 'id': project_id,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'parent_id': leaf_project['id'],
- 'is_domain': False}
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.create_project,
- project_id,
- project)
-
- def test_project_update_missing_attrs_with_a_value(self):
- # Creating a project with no description attribute.
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'parent_id': None,
- 'is_domain': False}
- self.resource_api.create_project(project['id'], project)
-
- # Add a description attribute.
- project['description'] = uuid.uuid4().hex
- self.resource_api.update_project(project['id'], project)
-
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project_ref, project)
-
- def test_project_update_missing_attrs_with_a_falsey_value(self):
- # Creating a project with no description attribute.
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'parent_id': None,
- 'is_domain': False}
- self.resource_api.create_project(project['id'], project)
-
- # Add a description attribute.
- project['description'] = ''
- self.resource_api.update_project(project['id'], project)
-
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project_ref, project)
-
- def test_domain_crud(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- self.resource_api.create_domain(domain['id'], domain)
- domain_ref = self.resource_api.get_domain(domain['id'])
- self.assertDictEqual(domain_ref, domain)
-
- domain['name'] = uuid.uuid4().hex
- self.resource_api.update_domain(domain['id'], domain)
- domain_ref = self.resource_api.get_domain(domain['id'])
- self.assertDictEqual(domain_ref, domain)
-
- # Ensure an 'enabled' domain cannot be deleted
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.delete_domain,
- domain_id=domain['id'])
-
- # Disable the domain
- domain['enabled'] = False
- self.resource_api.update_domain(domain['id'], domain)
-
- # Delete the domain
- self.resource_api.delete_domain(domain['id'])
-
- # Make sure the domain no longer exists
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
-
- @unit.skip_if_no_multiple_domains_support
- def test_create_domain_case_sensitivity(self):
- # create a ref with a lowercase name
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex.lower()}
- self.resource_api.create_domain(ref['id'], ref)
-
- # assign a new ID with the same name, but this time in uppercase
- ref['id'] = uuid.uuid4().hex
- ref['name'] = ref['name'].upper()
- self.resource_api.create_domain(ref['id'], ref)
-
- def test_attribute_update(self):
- project = {
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.resource_api.create_project(project['id'], project)
-
- # pick a key known to be non-existent
- key = 'description'
-
- def assert_key_equals(value):
- project_ref = self.resource_api.update_project(
- project['id'], project)
- self.assertEqual(value, project_ref[key])
- project_ref = self.resource_api.get_project(project['id'])
- self.assertEqual(value, project_ref[key])
-
- def assert_get_key_is(value):
- project_ref = self.resource_api.update_project(
- project['id'], project)
- self.assertIs(project_ref.get(key), value)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertIs(project_ref.get(key), value)
-
- # add an attribute that doesn't exist, set it to a falsey value
- value = ''
- project[key] = value
- assert_key_equals(value)
-
- # set an attribute with a falsey value to null
- value = None
- project[key] = value
- assert_get_key_is(value)
-
- # do it again, in case updating from this situation is handled oddly
- value = None
- project[key] = value
- assert_get_key_is(value)
-
- # set a possibly-null value to a falsey value
- value = ''
- project[key] = value
- assert_key_equals(value)
-
- # set a falsey value to a truthy value
- value = uuid.uuid4().hex
- project[key] = value
- assert_key_equals(value)
-
- def test_user_crud(self):
- user_dict = {'domain_id': DEFAULT_DOMAIN_ID,
- 'name': uuid.uuid4().hex, 'password': 'passw0rd'}
- user = self.identity_api.create_user(user_dict)
- user_ref = self.identity_api.get_user(user['id'])
- del user_dict['password']
- user_ref_dict = {x: user_ref[x] for x in user_ref}
- self.assertDictContainsSubset(user_dict, user_ref_dict)
-
- user_dict['password'] = uuid.uuid4().hex
- self.identity_api.update_user(user['id'], user_dict)
- user_ref = self.identity_api.get_user(user['id'])
- del user_dict['password']
- user_ref_dict = {x: user_ref[x] for x in user_ref}
- self.assertDictContainsSubset(user_dict, user_ref_dict)
-
- self.identity_api.delete_user(user['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- user['id'])
-
- def test_list_projects_for_user(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain['id'], domain)
- user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': domain['id'], 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertEqual(0, len(user_projects))
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_baz['id'],
- role_id=self.role_member['id'])
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertEqual(2, len(user_projects))
-
- def test_list_projects_for_user_with_grants(self):
- # Create two groups each with a role on a different project, and
- # make user1 a member of both groups. Both these new projects
- # should now be included, along with any direct user grants.
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain['id'], domain)
- user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': domain['id'], 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
- group1 = self.identity_api.create_group(group1)
- group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
- group2 = self.identity_api.create_group(group2)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
- self.resource_api.create_project(project1['id'], project1)
- project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
- self.resource_api.create_project(project2['id'], project2)
- self.identity_api.add_user_to_group(user1['id'], group1['id'])
- self.identity_api.add_user_to_group(user1['id'], group2['id'])
-
- # Create 3 grants, one user grant, the other two as group grants
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=project1['id'],
- role_id=self.role_admin['id'])
- self.assignment_api.create_grant(group_id=group2['id'],
- project_id=project2['id'],
- role_id=self.role_admin['id'])
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertEqual(3, len(user_projects))
-
- @unit.skip_if_cache_disabled('resource')
- @unit.skip_if_no_multiple_domains_support
- def test_domain_rename_invalidates_get_domain_by_name_cache(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- domain_id = domain['id']
- domain_name = domain['name']
- self.resource_api.create_domain(domain_id, domain)
- domain_ref = self.resource_api.get_domain_by_name(domain_name)
- domain_ref['name'] = uuid.uuid4().hex
- self.resource_api.update_domain(domain_id, domain_ref)
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain_by_name,
- domain_name)
-
- @unit.skip_if_cache_disabled('resource')
- def test_cache_layer_domain_crud(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- domain_id = domain['id']
- # Create Domain
- self.resource_api.create_domain(domain_id, domain)
- domain_ref = self.resource_api.get_domain(domain_id)
- updated_domain_ref = copy.deepcopy(domain_ref)
- updated_domain_ref['name'] = uuid.uuid4().hex
- # Update domain, bypassing resource api manager
- self.resource_api.driver.update_domain(domain_id, updated_domain_ref)
- # Verify get_domain still returns the domain
- self.assertDictContainsSubset(
- domain_ref, self.resource_api.get_domain(domain_id))
- # Invalidate cache
- self.resource_api.get_domain.invalidate(self.resource_api,
- domain_id)
- # Verify get_domain returns the updated domain
- self.assertDictContainsSubset(
- updated_domain_ref, self.resource_api.get_domain(domain_id))
- # Update the domain back to original ref, using the assignment api
- # manager
- self.resource_api.update_domain(domain_id, domain_ref)
- self.assertDictContainsSubset(
- domain_ref, self.resource_api.get_domain(domain_id))
- # Make sure domain is 'disabled', bypass resource api manager
- domain_ref_disabled = domain_ref.copy()
- domain_ref_disabled['enabled'] = False
- self.resource_api.driver.update_domain(domain_id,
- domain_ref_disabled)
- # Delete domain, bypassing resource api manager
- self.resource_api.driver.delete_domain(domain_id)
- # Verify get_domain still returns the domain
- self.assertDictContainsSubset(
- domain_ref, self.resource_api.get_domain(domain_id))
- # Invalidate cache
- self.resource_api.get_domain.invalidate(self.resource_api,
- domain_id)
- # Verify get_domain now raises DomainNotFound
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain, domain_id)
- # Recreate Domain
- self.resource_api.create_domain(domain_id, domain)
- self.resource_api.get_domain(domain_id)
- # Make sure domain is 'disabled', bypass resource api manager
- domain['enabled'] = False
- self.resource_api.driver.update_domain(domain_id, domain)
- # Delete domain
- self.resource_api.delete_domain(domain_id)
- # verify DomainNotFound raised
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain_id)
-
- @unit.skip_if_cache_disabled('resource')
- @unit.skip_if_no_multiple_domains_support
- def test_project_rename_invalidates_get_project_by_name_cache(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
- project_id = project['id']
- project_name = project['name']
- self.resource_api.create_domain(domain['id'], domain)
- # Create a project
- self.resource_api.create_project(project_id, project)
- self.resource_api.get_project_by_name(project_name, domain['id'])
- project['name'] = uuid.uuid4().hex
- self.resource_api.update_project(project_id, project)
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project_by_name,
- project_name,
- domain['id'])
-
- @unit.skip_if_cache_disabled('resource')
- @unit.skip_if_no_multiple_domains_support
- def test_cache_layer_project_crud(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
- project_id = project['id']
- self.resource_api.create_domain(domain['id'], domain)
- # Create a project
- self.resource_api.create_project(project_id, project)
- self.resource_api.get_project(project_id)
- updated_project = copy.deepcopy(project)
- updated_project['name'] = uuid.uuid4().hex
- # Update project, bypassing resource manager
- self.resource_api.driver.update_project(project_id,
- updated_project)
- # Verify get_project still returns the original project_ref
- self.assertDictContainsSubset(
- project, self.resource_api.get_project(project_id))
- # Invalidate cache
- self.resource_api.get_project.invalidate(self.resource_api,
- project_id)
- # Verify get_project now returns the new project
- self.assertDictContainsSubset(
- updated_project,
- self.resource_api.get_project(project_id))
- # Update project using the resource_api manager back to original
- self.resource_api.update_project(project['id'], project)
- # Verify get_project returns the original project_ref
- self.assertDictContainsSubset(
- project, self.resource_api.get_project(project_id))
- # Delete project bypassing resource
- self.resource_api.driver.delete_project(project_id)
- # Verify get_project still returns the project_ref
- self.assertDictContainsSubset(
- project, self.resource_api.get_project(project_id))
- # Invalidate cache
- self.resource_api.get_project.invalidate(self.resource_api,
- project_id)
- # Verify ProjectNotFound now raised
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project_id)
- # recreate project
- self.resource_api.create_project(project_id, project)
- self.resource_api.get_project(project_id)
- # delete project
- self.resource_api.delete_project(project_id)
- # Verify ProjectNotFound is raised
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project_id)
-
- def create_user_dict(self, **attributes):
- user_dict = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True}
- user_dict.update(attributes)
- return user_dict
-
- def test_arbitrary_attributes_are_returned_from_create_user(self):
- attr_value = uuid.uuid4().hex
- user_data = self.create_user_dict(arbitrary_attr=attr_value)
-
- user = self.identity_api.create_user(user_data)
-
- self.assertEqual(attr_value, user['arbitrary_attr'])
-
- def test_arbitrary_attributes_are_returned_from_get_user(self):
- attr_value = uuid.uuid4().hex
- user_data = self.create_user_dict(arbitrary_attr=attr_value)
-
- user_data = self.identity_api.create_user(user_data)
-
- user = self.identity_api.get_user(user_data['id'])
- self.assertEqual(attr_value, user['arbitrary_attr'])
-
- def test_new_arbitrary_attributes_are_returned_from_update_user(self):
- user_data = self.create_user_dict()
-
- user = self.identity_api.create_user(user_data)
- attr_value = uuid.uuid4().hex
- user['arbitrary_attr'] = attr_value
- updated_user = self.identity_api.update_user(user['id'], user)
-
- self.assertEqual(attr_value, updated_user['arbitrary_attr'])
-
- def test_updated_arbitrary_attributes_are_returned_from_update_user(self):
- attr_value = uuid.uuid4().hex
- user_data = self.create_user_dict(arbitrary_attr=attr_value)
-
- new_attr_value = uuid.uuid4().hex
- user = self.identity_api.create_user(user_data)
- user['arbitrary_attr'] = new_attr_value
- updated_user = self.identity_api.update_user(user['id'], user)
-
- self.assertEqual(new_attr_value, updated_user['arbitrary_attr'])
-
- def test_create_grant_no_user(self):
- # If call create_grant with a user that doesn't exist, doesn't fail.
- self.assignment_api.create_grant(
- self.role_other['id'],
- user_id=uuid.uuid4().hex,
- project_id=self.tenant_bar['id'])
-
- def test_create_grant_no_group(self):
- # If call create_grant with a group that doesn't exist, doesn't fail.
- self.assignment_api.create_grant(
- self.role_other['id'],
- group_id=uuid.uuid4().hex,
- project_id=self.tenant_bar['id'])
-
- @unit.skip_if_no_multiple_domains_support
- def test_get_default_domain_by_name(self):
- domain_name = 'default'
-
- domain = {'id': uuid.uuid4().hex, 'name': domain_name, 'enabled': True}
- self.resource_api.create_domain(domain['id'], domain)
-
- domain_ref = self.resource_api.get_domain_by_name(domain_name)
- self.assertEqual(domain, domain_ref)
-
- def test_get_not_default_domain_by_name(self):
- domain_name = 'foo'
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain_by_name,
- domain_name)
-
- def test_project_update_and_project_get_return_same_response(self):
- project = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
-
- self.resource_api.create_project(project['id'], project)
-
- updated_project = {'enabled': False}
- updated_project_ref = self.resource_api.update_project(
- project['id'], updated_project)
-
- # SQL backend adds 'extra' field
- updated_project_ref.pop('extra', None)
-
- self.assertIs(False, updated_project_ref['enabled'])
-
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project_ref, updated_project_ref)
-
- def test_user_update_and_user_get_return_same_response(self):
- user = {
- 'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
-
- user = self.identity_api.create_user(user)
-
- updated_user = {'enabled': False}
- updated_user_ref = self.identity_api.update_user(
- user['id'], updated_user)
-
- # SQL backend adds 'extra' field
- updated_user_ref.pop('extra', None)
-
- self.assertIs(False, updated_user_ref['enabled'])
-
- user_ref = self.identity_api.get_user(user['id'])
- self.assertDictEqual(user_ref, updated_user_ref)
-
- def test_delete_group_removes_role_assignments(self):
- # When a group is deleted any role assignments for the group are
- # removed.
-
- MEMBER_ROLE_ID = 'member'
-
- def get_member_assignments():
- assignments = self.assignment_api.list_role_assignments()
- return [x for x in assignments if x['role_id'] == MEMBER_ROLE_ID]
-
- orig_member_assignments = get_member_assignments()
-
- # Create a group.
- new_group = {
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'name': self.getUniqueString(prefix='tdgrra')}
- new_group = self.identity_api.create_group(new_group)
-
- # Create a project.
- new_project = {
- 'id': uuid.uuid4().hex,
- 'name': self.getUniqueString(prefix='tdgrra'),
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project(new_project['id'], new_project)
-
- # Assign a role to the group.
- self.assignment_api.create_grant(
- group_id=new_group['id'], project_id=new_project['id'],
- role_id=MEMBER_ROLE_ID)
-
- # Delete the group.
- self.identity_api.delete_group(new_group['id'])
-
- # Check that the role assignment for the group is gone
- member_assignments = get_member_assignments()
-
- self.assertThat(member_assignments,
- matchers.Equals(orig_member_assignments))
-
- def test_get_roles_for_groups_on_domain(self):
- """Test retrieving group domain roles.
-
- Test Plan:
-
- - Create a domain, three groups and three roles
- - Assign one an inherited and the others a non-inherited group role
- to the domain
- - Ensure that only the non-inherited roles are returned on the domain
-
- """
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- group_list = []
- group_id_list = []
- role_list = []
- for _ in range(3):
- group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
- group = self.identity_api.create_group(group)
- group_list.append(group)
- group_id_list.append(group['id'])
-
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- # Assign the roles - one is inherited
- self.assignment_api.create_grant(group_id=group_list[0]['id'],
- domain_id=domain1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.create_grant(group_id=group_list[1]['id'],
- domain_id=domain1['id'],
- role_id=role_list[1]['id'])
- self.assignment_api.create_grant(group_id=group_list[2]['id'],
- domain_id=domain1['id'],
- role_id=role_list[2]['id'],
- inherited_to_projects=True)
-
- # Now get the effective roles for the groups on the domain project. We
- # shouldn't get back the inherited role.
-
- role_refs = self.assignment_api.get_roles_for_groups(
- group_id_list, domain_id=domain1['id'])
-
- self.assertThat(role_refs, matchers.HasLength(2))
- self.assertIn(role_list[0], role_refs)
- self.assertIn(role_list[1], role_refs)
-
- def test_get_roles_for_groups_on_project(self):
- """Test retrieving group project roles.
-
- Test Plan:
-
- - Create two domains, two projects, six groups and six roles
- - Project1 is in Domain1, Project2 is in Domain2
- - Domain2/Project2 are spoilers
- - Assign a different direct group role to each project as well
- as both an inherited and non-inherited role to each domain
- - Get the group roles for Project 1 - depending on whether we have
- enabled inheritance, we should either get back just the direct role
- or both the direct one plus the inherited domain role from Domain 1
-
- """
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
- self.resource_api.create_project(project1['id'], project1)
- project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain2['id']}
- self.resource_api.create_project(project2['id'], project2)
- group_list = []
- group_id_list = []
- role_list = []
- for _ in range(6):
- group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
- group = self.identity_api.create_group(group)
- group_list.append(group)
- group_id_list.append(group['id'])
-
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- # Assign the roles - one inherited and one non-inherited on Domain1,
- # plus one on Project1
- self.assignment_api.create_grant(group_id=group_list[0]['id'],
- domain_id=domain1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.create_grant(group_id=group_list[1]['id'],
- domain_id=domain1['id'],
- role_id=role_list[1]['id'],
- inherited_to_projects=True)
- self.assignment_api.create_grant(group_id=group_list[2]['id'],
- project_id=project1['id'],
- role_id=role_list[2]['id'])
-
- # ...and a duplicate set of spoiler assignments to Domain2/Project2
- self.assignment_api.create_grant(group_id=group_list[3]['id'],
- domain_id=domain2['id'],
- role_id=role_list[3]['id'])
- self.assignment_api.create_grant(group_id=group_list[4]['id'],
- domain_id=domain2['id'],
- role_id=role_list[4]['id'],
- inherited_to_projects=True)
- self.assignment_api.create_grant(group_id=group_list[5]['id'],
- project_id=project2['id'],
- role_id=role_list[5]['id'])
-
- # Now get the effective roles for all groups on the Project1. With
- # inheritance off, we should only get back the direct role.
-
- self.config_fixture.config(group='os_inherit', enabled=False)
- role_refs = self.assignment_api.get_roles_for_groups(
- group_id_list, project_id=project1['id'])
-
- self.assertThat(role_refs, matchers.HasLength(1))
- self.assertIn(role_list[2], role_refs)
-
- # With inheritance on, we should also get back the inherited role from
- # its owning domain.
-
- self.config_fixture.config(group='os_inherit', enabled=True)
- role_refs = self.assignment_api.get_roles_for_groups(
- group_id_list, project_id=project1['id'])
-
- self.assertThat(role_refs, matchers.HasLength(2))
- self.assertIn(role_list[1], role_refs)
- self.assertIn(role_list[2], role_refs)
-
- def test_list_domains_for_groups(self):
- """Test retrieving domains for a list of groups.
-
- Test Plan:
-
- - Create three domains, three groups and one role
- - Assign a non-inherited group role to two domains, and an inherited
- group role to the third
- - Ensure only the domains with non-inherited roles are returned
-
- """
- domain_list = []
- group_list = []
- group_id_list = []
- for _ in range(3):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain['id'], domain)
- domain_list.append(domain)
-
- group = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
- group = self.identity_api.create_group(group)
- group_list.append(group)
- group_id_list.append(group['id'])
-
- role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role1['id'], role1)
-
- # Assign the roles - one is inherited
- self.assignment_api.create_grant(group_id=group_list[0]['id'],
- domain_id=domain_list[0]['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group_list[1]['id'],
- domain_id=domain_list[1]['id'],
- role_id=role1['id'])
- self.assignment_api.create_grant(group_id=group_list[2]['id'],
- domain_id=domain_list[2]['id'],
- role_id=role1['id'],
- inherited_to_projects=True)
-
- # Now list the domains that have roles for any of the 3 groups
- # We shouldn't get back domain[2] since that had an inherited role.
-
- domain_refs = (
- self.assignment_api.list_domains_for_groups(group_id_list))
-
- self.assertThat(domain_refs, matchers.HasLength(2))
- self.assertIn(domain_list[0], domain_refs)
- self.assertIn(domain_list[1], domain_refs)
-
- def test_list_projects_for_groups(self):
- """Test retrieving projects for a list of groups.
-
- Test Plan:
-
- - Create two domains, four projects, seven groups and seven roles
- - Project1-3 are in Domain1, Project4 is in Domain2
- - Domain2/Project4 are spoilers
- - Project1 and 2 have direct group roles, Project3 has no direct
- roles but should inherit a group role from Domain1
- - Get the projects for the group roles that are assigned to Project1
- Project2 and the inherited one on Domain1. Depending on whether we
- have enabled inheritance, we should either get back just the projects
- with direct roles (Project 1 and 2) or also Project3 due to its
- inherited role from Domain1.
-
- """
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id'], 'is_domain': False}
- project1 = self.resource_api.create_project(project1['id'], project1)
- project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id'], 'is_domain': False}
- project2 = self.resource_api.create_project(project2['id'], project2)
- project3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id'], 'is_domain': False}
- project3 = self.resource_api.create_project(project3['id'], project3)
- project4 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain2['id'], 'is_domain': False}
- project4 = self.resource_api.create_project(project4['id'], project4)
- group_list = []
- role_list = []
- for _ in range(7):
- group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
- group = self.identity_api.create_group(group)
- group_list.append(group)
-
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- # Assign the roles - one inherited and one non-inherited on Domain1,
- # plus one on Project1 and Project2
- self.assignment_api.create_grant(group_id=group_list[0]['id'],
- domain_id=domain1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.create_grant(group_id=group_list[1]['id'],
- domain_id=domain1['id'],
- role_id=role_list[1]['id'],
- inherited_to_projects=True)
- self.assignment_api.create_grant(group_id=group_list[2]['id'],
- project_id=project1['id'],
- role_id=role_list[2]['id'])
- self.assignment_api.create_grant(group_id=group_list[3]['id'],
- project_id=project2['id'],
- role_id=role_list[3]['id'])
-
- # ...and a few of spoiler assignments to Domain2/Project4
- self.assignment_api.create_grant(group_id=group_list[4]['id'],
- domain_id=domain2['id'],
- role_id=role_list[4]['id'])
- self.assignment_api.create_grant(group_id=group_list[5]['id'],
- domain_id=domain2['id'],
- role_id=role_list[5]['id'],
- inherited_to_projects=True)
- self.assignment_api.create_grant(group_id=group_list[6]['id'],
- project_id=project4['id'],
- role_id=role_list[6]['id'])
-
- # Now get the projects for the groups that have roles on Project1,
- # Project2 and the inherited role on Domain!. With inheritance off,
- # we should only get back the projects with direct role.
-
- self.config_fixture.config(group='os_inherit', enabled=False)
- group_id_list = [group_list[1]['id'], group_list[2]['id'],
- group_list[3]['id']]
- project_refs = (
- self.assignment_api.list_projects_for_groups(group_id_list))
-
- self.assertThat(project_refs, matchers.HasLength(2))
- self.assertIn(project1, project_refs)
- self.assertIn(project2, project_refs)
-
- # With inheritance on, we should also get back the Project3 due to the
- # inherited role from its owning domain.
-
- self.config_fixture.config(group='os_inherit', enabled=True)
- project_refs = (
- self.assignment_api.list_projects_for_groups(group_id_list))
-
- self.assertThat(project_refs, matchers.HasLength(3))
- self.assertIn(project1, project_refs)
- self.assertIn(project2, project_refs)
- self.assertIn(project3, project_refs)
-
- def test_update_role_no_name(self):
- # A user can update a role and not include the name.
-
- # description is picked just because it's not name.
- self.role_api.update_role(self.role_member['id'],
- {'description': uuid.uuid4().hex})
- # If the previous line didn't raise an exception then the test passes.
-
- def test_update_role_same_name(self):
- # A user can update a role and set the name to be the same as it was.
-
- self.role_api.update_role(self.role_member['id'],
- {'name': self.role_member['name']})
- # If the previous line didn't raise an exception then the test passes.
-
-
class TokenTests(object):
    """Token-persistence behavior tests, mixed into backend test classes.

    Relies on the concrete test class providing ``self.token_provider_api``
    (with its ``_persistence`` manager) and the usual config fixtures
    (``CONF``, ``self.config_fixture``).
    """

    def _create_token_id(self):
        """Return a long CMS-signed token id (exercises PKI-style ids)."""
        # Use a token signed by the cms module
        token_id = ""
        for i in range(1, 20):
            token_id += uuid.uuid4().hex
        return cms.cms_sign_token(token_id,
                                  CONF.signing.certfile,
                                  CONF.signing.keyfile)

    def _assert_revoked_token_list_matches_token_persistence(
            self, revoked_token_id_list):
        # Assert that the list passed in matches the list returned by the
        # token persistence service
        persistence_list = [
            x['id']
            for x in self.token_provider_api.list_revoked_tokens()
        ]
        self.assertEqual(persistence_list, revoked_token_id_list)

    def test_token_crud(self):
        """Create, read back, then delete a token; deleted tokens 404."""
        token_id = self._create_token_id()
        data = {'id': token_id, 'a': 'b',
                'trust_id': None,
                'user': {'id': 'testuserid'}}
        data_ref = self.token_provider_api._persistence.create_token(token_id,
                                                                     data)
        # The backend adds 'expires' and 'user_id'; strip them (and 'id')
        # before comparing against the input dict.
        expires = data_ref.pop('expires')
        data_ref.pop('user_id')
        self.assertIsInstance(expires, datetime.datetime)
        data_ref.pop('id')
        data.pop('id')
        self.assertDictEqual(data_ref, data)

        new_data_ref = self.token_provider_api._persistence.get_token(token_id)
        expires = new_data_ref.pop('expires')
        self.assertIsInstance(expires, datetime.datetime)
        new_data_ref.pop('user_id')
        new_data_ref.pop('id')

        self.assertEqual(data, new_data_ref)

        # After deletion, both get_token and a second delete_token raise.
        self.token_provider_api._persistence.delete_token(token_id)
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api._persistence.get_token, token_id)
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api._persistence.delete_token, token_id)

    def create_token_sample_data(self, token_id=None, tenant_id=None,
                                 trust_id=None, user_id=None, expires=None):
        """Persist a minimal V2-style token; return ``(token_id, data)``.

        Pass ``tenant_id=NULL_OBJECT`` to store an explicit ``None`` tenant
        (the "present but empty" case).
        """
        if token_id is None:
            token_id = self._create_token_id()
        if user_id is None:
            user_id = 'testuserid'
        # FIXME(morganfainberg): These tokens look nothing like "Real" tokens.
        # This should be fixed when token issuance is cleaned up.
        data = {'id': token_id, 'a': 'b',
                'user': {'id': user_id}}
        if tenant_id is not None:
            data['tenant'] = {'id': tenant_id, 'name': tenant_id}
        if tenant_id is NULL_OBJECT:
            data['tenant'] = None
        if expires is not None:
            data['expires'] = expires
        if trust_id is not None:
            data['trust_id'] = trust_id
            data.setdefault('access', {}).setdefault('trust', {})
            # Testuserid2 is used here since a trustee will be different in
            # the cases of impersonation and therefore should not match the
            # token's user_id.
            data['access']['trust']['trustee_user_id'] = 'testuserid2'
        data['token_version'] = provider.V2
        # Issue token stores a copy of all token data at token['token_data'].
        # This emulates that assumption as part of the test.
        data['token_data'] = copy.deepcopy(data)
        new_token = self.token_provider_api._persistence.create_token(token_id,
                                                                      data)
        return new_token['id'], data

    def test_delete_tokens(self):
        """Bulk delete by user+tenant removes only that user's tokens."""
        tokens = self.token_provider_api._persistence._list_tokens(
            'testuserid')
        self.assertEqual(0, len(tokens))
        token_id1, data = self.create_token_sample_data(
            tenant_id='testtenantid')
        token_id2, data = self.create_token_sample_data(
            tenant_id='testtenantid')
        # Third token belongs to a different user and must survive.
        token_id3, data = self.create_token_sample_data(
            tenant_id='testtenantid',
            user_id='testuserid1')
        tokens = self.token_provider_api._persistence._list_tokens(
            'testuserid')
        self.assertEqual(2, len(tokens))
        self.assertIn(token_id2, tokens)
        self.assertIn(token_id1, tokens)
        self.token_provider_api._persistence.delete_tokens(
            user_id='testuserid',
            tenant_id='testtenantid')
        tokens = self.token_provider_api._persistence._list_tokens(
            'testuserid')
        self.assertEqual(0, len(tokens))
        self.assertRaises(exception.TokenNotFound,
                          self.token_provider_api._persistence.get_token,
                          token_id1)
        self.assertRaises(exception.TokenNotFound,
                          self.token_provider_api._persistence.get_token,
                          token_id2)

        # The other user's token is still retrievable.
        self.token_provider_api._persistence.get_token(token_id3)

    def test_delete_tokens_trust(self):
        """Bulk delete scoped to a trust leaves other trusts' tokens alone."""
        tokens = self.token_provider_api._persistence._list_tokens(
            user_id='testuserid')
        self.assertEqual(0, len(tokens))
        token_id1, data = self.create_token_sample_data(
            tenant_id='testtenantid',
            trust_id='testtrustid')
        # Different user and different trust; must not be deleted below.
        token_id2, data = self.create_token_sample_data(
            tenant_id='testtenantid',
            user_id='testuserid1',
            trust_id='testtrustid1')
        tokens = self.token_provider_api._persistence._list_tokens(
            'testuserid')
        self.assertEqual(1, len(tokens))
        self.assertIn(token_id1, tokens)
        self.token_provider_api._persistence.delete_tokens(
            user_id='testuserid',
            tenant_id='testtenantid',
            trust_id='testtrustid')
        self.assertRaises(exception.TokenNotFound,
                          self.token_provider_api._persistence.get_token,
                          token_id1)
        self.token_provider_api._persistence.get_token(token_id2)

    def _test_token_list(self, token_list_fn):
        """Exercise listing via *token_list_fn*; includes tenant filtering."""
        tokens = token_list_fn('testuserid')
        self.assertEqual(0, len(tokens))
        token_id1, data = self.create_token_sample_data()
        tokens = token_list_fn('testuserid')
        self.assertEqual(1, len(tokens))
        self.assertIn(token_id1, tokens)
        token_id2, data = self.create_token_sample_data()
        tokens = token_list_fn('testuserid')
        self.assertEqual(2, len(tokens))
        self.assertIn(token_id2, tokens)
        self.assertIn(token_id1, tokens)
        self.token_provider_api._persistence.delete_token(token_id1)
        tokens = token_list_fn('testuserid')
        self.assertIn(token_id2, tokens)
        self.assertNotIn(token_id1, tokens)
        self.token_provider_api._persistence.delete_token(token_id2)
        tokens = token_list_fn('testuserid')
        self.assertNotIn(token_id2, tokens)
        self.assertNotIn(token_id1, tokens)

        # tenant-specific tokens
        tenant1 = uuid.uuid4().hex
        tenant2 = uuid.uuid4().hex
        token_id3, data = self.create_token_sample_data(tenant_id=tenant1)
        token_id4, data = self.create_token_sample_data(tenant_id=tenant2)
        # test for existing but empty tenant (LP:1078497)
        token_id5, data = self.create_token_sample_data(tenant_id=NULL_OBJECT)
        tokens = token_list_fn('testuserid')
        self.assertEqual(3, len(tokens))
        self.assertNotIn(token_id1, tokens)
        self.assertNotIn(token_id2, tokens)
        self.assertIn(token_id3, tokens)
        self.assertIn(token_id4, tokens)
        self.assertIn(token_id5, tokens)
        tokens = token_list_fn('testuserid', tenant2)
        self.assertEqual(1, len(tokens))
        self.assertNotIn(token_id1, tokens)
        self.assertNotIn(token_id2, tokens)
        self.assertNotIn(token_id3, tokens)
        self.assertIn(token_id4, tokens)

    def test_token_list(self):
        """Run the listing scenarios against the private _list_tokens."""
        self._test_token_list(
            self.token_provider_api._persistence._list_tokens)

    def test_token_list_trust(self):
        """_list_tokens can be filtered by trust_id."""
        trust_id = uuid.uuid4().hex
        token_id5, data = self.create_token_sample_data(trust_id=trust_id)
        tokens = self.token_provider_api._persistence._list_tokens(
            'testuserid', trust_id=trust_id)
        self.assertEqual(1, len(tokens))
        self.assertIn(token_id5, tokens)

    def test_get_token_404(self):
        """Unknown and None token ids both raise TokenNotFound."""
        self.assertRaises(exception.TokenNotFound,
                          self.token_provider_api._persistence.get_token,
                          uuid.uuid4().hex)
        self.assertRaises(exception.TokenNotFound,
                          self.token_provider_api._persistence.get_token,
                          None)

    def test_delete_token_404(self):
        """Deleting an unknown token id raises TokenNotFound."""
        self.assertRaises(exception.TokenNotFound,
                          self.token_provider_api._persistence.delete_token,
                          uuid.uuid4().hex)

    def test_expired_token(self):
        """A token past its expiry is not retrievable via get_token."""
        token_id = uuid.uuid4().hex
        expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
        data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
                'expires': expire_time,
                'trust_id': None,
                'user': {'id': 'testuserid'}}
        data_ref = self.token_provider_api._persistence.create_token(token_id,
                                                                     data)
        data_ref.pop('user_id')
        self.assertDictEqual(data_ref, data)
        self.assertRaises(exception.TokenNotFound,
                          self.token_provider_api._persistence.get_token,
                          token_id)

    def test_null_expires_token(self):
        """A null 'expires' is replaced by a backend-assigned expiry."""
        token_id = uuid.uuid4().hex
        data = {'id': token_id, 'id_hash': token_id, 'a': 'b', 'expires': None,
                'user': {'id': 'testuserid'}}
        data_ref = self.token_provider_api._persistence.create_token(token_id,
                                                                     data)
        self.assertIsNotNone(data_ref['expires'])
        new_data_ref = self.token_provider_api._persistence.get_token(token_id)

        # MySQL doesn't store microseconds, so discard them before testing
        data_ref['expires'] = data_ref['expires'].replace(microsecond=0)
        new_data_ref['expires'] = new_data_ref['expires'].replace(
            microsecond=0)

        self.assertEqual(data_ref, new_data_ref)

    def check_list_revoked_tokens(self, token_ids):
        """Assert every id in *token_ids* appears in the revocation list."""
        revoked_ids = [x['id']
                       for x in self.token_provider_api.list_revoked_tokens()]
        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
        for token_id in token_ids:
            self.assertIn(token_id, revoked_ids)

    def delete_token(self):
        # Helper (deliberately not test_-prefixed): create then delete a
        # token and return its id so callers can check the revocation list.
        token_id = uuid.uuid4().hex
        data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
                'user': {'id': 'testuserid'}}
        data_ref = self.token_provider_api._persistence.create_token(token_id,
                                                                     data)
        self.token_provider_api._persistence.delete_token(token_id)
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api._persistence.get_token,
            data_ref['id'])
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api._persistence.delete_token,
            data_ref['id'])
        return token_id

    def test_list_revoked_tokens_returns_empty_list(self):
        """With no deletions, the revocation list is empty."""
        revoked_ids = [x['id']
                       for x in self.token_provider_api.list_revoked_tokens()]
        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
        self.assertEqual([], revoked_ids)

    def test_list_revoked_tokens_for_single_token(self):
        self.check_list_revoked_tokens([self.delete_token()])

    def test_list_revoked_tokens_for_multiple_tokens(self):
        self.check_list_revoked_tokens([self.delete_token()
                                        for x in six.moves.range(2)])

    def test_flush_expired_token(self):
        """flush_expired_tokens drops expired rows but keeps live ones."""
        # First token: already expired, should be flushed.
        token_id = uuid.uuid4().hex
        expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
        data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
                'expires': expire_time,
                'trust_id': None,
                'user': {'id': 'testuserid'}}
        data_ref = self.token_provider_api._persistence.create_token(token_id,
                                                                     data)
        data_ref.pop('user_id')
        self.assertDictEqual(data_ref, data)

        # Second token: still valid, should survive the flush.
        token_id = uuid.uuid4().hex
        expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
        data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
                'expires': expire_time,
                'trust_id': None,
                'user': {'id': 'testuserid'}}
        data_ref = self.token_provider_api._persistence.create_token(token_id,
                                                                     data)
        data_ref.pop('user_id')
        self.assertDictEqual(data_ref, data)

        self.token_provider_api._persistence.flush_expired_tokens()
        tokens = self.token_provider_api._persistence._list_tokens(
            'testuserid')
        self.assertEqual(1, len(tokens))
        self.assertIn(token_id, tokens)

    @unit.skip_if_cache_disabled('token')
    def test_revocation_list_cache(self):
        """The cached revocation list only refreshes via the manager."""
        expire_time = timeutils.utcnow() + datetime.timedelta(minutes=10)
        token_id = uuid.uuid4().hex
        token_data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
                      'expires': expire_time,
                      'trust_id': None,
                      'user': {'id': 'testuserid'}}
        token2_id = uuid.uuid4().hex
        token2_data = {'id_hash': token2_id, 'id': token2_id, 'a': 'b',
                       'expires': expire_time,
                       'trust_id': None,
                       'user': {'id': 'testuserid'}}
        # Create 2 Tokens.
        self.token_provider_api._persistence.create_token(token_id,
                                                          token_data)
        self.token_provider_api._persistence.create_token(token2_id,
                                                          token2_data)
        # Verify the revocation list is empty.
        self.assertEqual(
            [], self.token_provider_api._persistence.list_revoked_tokens())
        self.assertEqual([], self.token_provider_api.list_revoked_tokens())
        # Delete a token directly, bypassing the manager.
        self.token_provider_api._persistence.driver.delete_token(token_id)
        # Verify the revocation list is still empty.
        self.assertEqual(
            [], self.token_provider_api._persistence.list_revoked_tokens())
        self.assertEqual([], self.token_provider_api.list_revoked_tokens())
        # Invalidate the revocation list.
        self.token_provider_api._persistence.invalidate_revocation_list()
        # Verify the deleted token is in the revocation list.
        revoked_ids = [x['id']
                       for x in self.token_provider_api.list_revoked_tokens()]
        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
        self.assertIn(token_id, revoked_ids)
        # Delete the second token, through the manager
        self.token_provider_api._persistence.delete_token(token2_id)
        revoked_ids = [x['id']
                       for x in self.token_provider_api.list_revoked_tokens()]
        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
        # Verify both tokens are in the revocation list.
        self.assertIn(token_id, revoked_ids)
        self.assertIn(token2_id, revoked_ids)

    def _test_predictable_revoked_pki_token_id(self, hash_fn):
        """Revoked PKI token ids are stored hashed with *hash_fn*."""
        token_id = self._create_token_id()
        token_id_hash = hash_fn(token_id).hexdigest()
        token = {'user': {'id': uuid.uuid4().hex}}

        self.token_provider_api._persistence.create_token(token_id, token)
        self.token_provider_api._persistence.delete_token(token_id)

        revoked_ids = [x['id']
                       for x in self.token_provider_api.list_revoked_tokens()]
        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
        # Only the hash (never the raw signed id) appears in the list.
        self.assertIn(token_id_hash, revoked_ids)
        self.assertNotIn(token_id, revoked_ids)
        for t in self.token_provider_api._persistence.list_revoked_tokens():
            self.assertIn('expires', t)

    def test_predictable_revoked_pki_token_id_default(self):
        # Default hash algorithm is md5.
        self._test_predictable_revoked_pki_token_id(hashlib.md5)

    def test_predictable_revoked_pki_token_id_sha256(self):
        self.config_fixture.config(group='token', hash_algorithm='sha256')
        self._test_predictable_revoked_pki_token_id(hashlib.sha256)

    def test_predictable_revoked_uuid_token_id(self):
        """UUID token ids are stored unhashed in the revocation list."""
        token_id = uuid.uuid4().hex
        token = {'user': {'id': uuid.uuid4().hex}}

        self.token_provider_api._persistence.create_token(token_id, token)
        self.token_provider_api._persistence.delete_token(token_id)

        revoked_tokens = self.token_provider_api.list_revoked_tokens()
        revoked_ids = [x['id'] for x in revoked_tokens]
        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
        self.assertIn(token_id, revoked_ids)
        for t in revoked_tokens:
            self.assertIn('expires', t)

    def test_create_unicode_token_id(self):
        """A unicode token id round-trips through create/get."""
        token_id = six.text_type(self._create_token_id())
        self.create_token_sample_data(token_id=token_id)
        self.token_provider_api._persistence.get_token(token_id)

    def test_create_unicode_user_id(self):
        """A unicode user id round-trips through create/get."""
        user_id = six.text_type(uuid.uuid4().hex)
        token_id, data = self.create_token_sample_data(user_id=user_id)
        self.token_provider_api._persistence.get_token(token_id)

    def test_token_expire_timezone(self):
        """Expiry handling is correct regardless of the process timezone."""

        @test_utils.timezone
        def _create_token(expire_time):
            token_id = uuid.uuid4().hex
            user_id = six.text_type(uuid.uuid4().hex)
            return self.create_token_sample_data(token_id=token_id,
                                                 user_id=user_id,
                                                 expires=expire_time)

        # Cycle through a spread of UTC offsets; behavior must not vary.
        for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']:
            test_utils.TZ = 'UTC' + d
            expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
            token_id, data_in = _create_token(expire_time)
            data_get = self.token_provider_api._persistence.get_token(token_id)

            self.assertEqual(data_in['id'], data_get['id'],
                             'TZ=%s' % test_utils.TZ)

            expire_time_expired = (
                timeutils.utcnow() + datetime.timedelta(minutes=-1))
            token_id, data_in = _create_token(expire_time_expired)
            self.assertRaises(exception.TokenNotFound,
                              self.token_provider_api._persistence.get_token,
                              data_in['id'])
-
-
class TokenCacheInvalidation(object):
    """Tests that token deletion invalidates cached validation results.

    Mixed into backend test classes that provide ``self.token_provider_api``
    and call ``_create_test_data`` during setup.
    """

    def _create_test_data(self):
        """Issue one scoped and one unscoped v2 token and warm the cache."""
        self.user = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
                     'password': uuid.uuid4().hex,
                     'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True}
        self.tenant = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
                       'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True}

        # Create an equivalent of a scoped token
        token_dict = {'user': self.user, 'tenant': self.tenant,
                      'metadata': {}, 'id': 'placeholder'}
        token_id, data = self.token_provider_api.issue_v2_token(token_dict)
        self.scoped_token_id = token_id

        # ..and an un-scoped one
        token_dict = {'user': self.user, 'tenant': None,
                      'metadata': {}, 'id': 'placeholder'}
        token_id, data = self.token_provider_api.issue_v2_token(token_dict)
        self.unscoped_token_id = token_id

        # Validate them, in the various ways possible - this will load the
        # responses into the token cache.
        self._check_scoped_tokens_are_valid()
        self._check_unscoped_tokens_are_valid()

    def _check_unscoped_tokens_are_invalid(self):
        """Both validation paths must 404 for the unscoped token."""
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api.validate_token,
            self.unscoped_token_id)
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api.validate_v2_token,
            self.unscoped_token_id)

    def _check_scoped_tokens_are_invalid(self):
        """All validation paths (with and without tenant) must 404."""
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api.validate_token,
            self.scoped_token_id)
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api.validate_token,
            self.scoped_token_id,
            self.tenant['id'])
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api.validate_v2_token,
            self.scoped_token_id)
        self.assertRaises(
            exception.TokenNotFound,
            self.token_provider_api.validate_v2_token,
            self.scoped_token_id,
            self.tenant['id'])

    def _check_scoped_tokens_are_valid(self):
        """All validation paths succeed for the scoped token."""
        self.token_provider_api.validate_token(self.scoped_token_id)
        self.token_provider_api.validate_token(
            self.scoped_token_id, belongs_to=self.tenant['id'])
        self.token_provider_api.validate_v2_token(self.scoped_token_id)
        self.token_provider_api.validate_v2_token(
            self.scoped_token_id, belongs_to=self.tenant['id'])

    def _check_unscoped_tokens_are_valid(self):
        """Both validation paths succeed for the unscoped token."""
        self.token_provider_api.validate_token(self.unscoped_token_id)
        self.token_provider_api.validate_v2_token(self.unscoped_token_id)

    def test_delete_unscoped_token(self):
        """Deleting the unscoped token leaves the scoped token valid."""
        self.token_provider_api._persistence.delete_token(
            self.unscoped_token_id)
        self._check_unscoped_tokens_are_invalid()
        self._check_scoped_tokens_are_valid()

    def test_delete_scoped_token_by_id(self):
        """Deleting the scoped token leaves the unscoped token valid."""
        self.token_provider_api._persistence.delete_token(self.scoped_token_id)
        self._check_scoped_tokens_are_invalid()
        self._check_unscoped_tokens_are_valid()

    def test_delete_scoped_token_by_user(self):
        """Deleting all of a user's tokens invalidates both tokens."""
        self.token_provider_api._persistence.delete_tokens(self.user['id'])
        # Since we are deleting all tokens for this user, they should all
        # now be invalid.
        self._check_scoped_tokens_are_invalid()
        self._check_unscoped_tokens_are_invalid()

    def test_delete_scoped_token_by_user_and_tenant(self):
        """Deleting by user+tenant only invalidates the scoped token."""
        self.token_provider_api._persistence.delete_tokens(
            self.user['id'],
            tenant_id=self.tenant['id'])
        self._check_scoped_tokens_are_invalid()
        self._check_unscoped_tokens_are_valid()
-
-
class TrustTests(object):
    """Trust backend tests, mixed into backend-specific test classes.

    Relies on the fixture providing ``self.trust_api`` and the default
    sample users/projects (``user_foo``, ``user_two``, ``tenant_bar``).
    """

    def create_sample_trust(self, new_id, remaining_uses=None):
        """Create and return a trust from user_foo to user_two.

        The trust carries three roles, impersonation enabled and a fixed
        far-future expiry; ``remaining_uses`` is passed straight through.
        """
        self.trustor = self.user_foo
        self.trustee = self.user_two
        trust_data = (self.trust_api.create_trust
                      (new_id,
                       {'trustor_user_id': self.trustor['id'],
                        'trustee_user_id': self.user_two['id'],
                        'project_id': self.tenant_bar['id'],
                        'expires_at': timeutils.
                        parse_isotime('2031-02-18T18:10:00Z'),
                        'impersonation': True,
                        'remaining_uses': remaining_uses},
                       roles=[{"id": "member"},
                              {"id": "other"},
                              {"id": "browser"}]))
        return trust_data

    def test_delete_trust(self):
        """A deleted trust is no longer retrievable."""
        new_id = uuid.uuid4().hex
        trust_data = self.create_sample_trust(new_id)
        trust_id = trust_data['id']
        self.assertIsNotNone(trust_data)
        trust_data = self.trust_api.get_trust(trust_id)
        self.assertEqual(new_id, trust_data['id'])
        self.trust_api.delete_trust(trust_id)
        self.assertRaises(exception.TrustNotFound,
                          self.trust_api.get_trust,
                          trust_id)

    def test_delete_trust_not_found(self):
        """Deleting an unknown trust id raises TrustNotFound."""
        trust_id = uuid.uuid4().hex
        self.assertRaises(exception.TrustNotFound,
                          self.trust_api.delete_trust,
                          trust_id)

    def test_get_trust(self):
        """A created trust can be fetched by id."""
        new_id = uuid.uuid4().hex
        trust_data = self.create_sample_trust(new_id)
        trust_id = trust_data['id']
        self.assertIsNotNone(trust_data)
        trust_data = self.trust_api.get_trust(trust_id)
        self.assertEqual(new_id, trust_data['id'])
        self.trust_api.delete_trust(trust_data['id'])

    def test_get_deleted_trust(self):
        """A soft-deleted trust is visible only with deleted=True."""
        new_id = uuid.uuid4().hex
        trust_data = self.create_sample_trust(new_id)
        self.assertIsNotNone(trust_data)
        self.assertIsNone(trust_data['deleted_at'])
        self.trust_api.delete_trust(new_id)
        self.assertRaises(exception.TrustNotFound,
                          self.trust_api.get_trust,
                          new_id)
        deleted_trust = self.trust_api.get_trust(trust_data['id'],
                                                 deleted=True)
        self.assertEqual(trust_data['id'], deleted_trust['id'])
        self.assertIsNotNone(deleted_trust.get('deleted_at'))

    def test_create_trust(self):
        """The created trust echoes back ids, expiry and roles."""
        new_id = uuid.uuid4().hex
        trust_data = self.create_sample_trust(new_id)

        self.assertEqual(new_id, trust_data['id'])
        self.assertEqual(self.trustee['id'], trust_data['trustee_user_id'])
        self.assertEqual(self.trustor['id'], trust_data['trustor_user_id'])
        self.assertTrue(timeutils.normalize_time(trust_data['expires_at']) >
                        timeutils.utcnow())

        self.assertEqual([{'id': 'member'},
                          {'id': 'other'},
                          {'id': 'browser'}], trust_data['roles'])

    def test_list_trust_by_trustee(self):
        """Listing by trustee returns only that user's trusts."""
        for i in range(3):
            self.create_sample_trust(uuid.uuid4().hex)
        trusts = self.trust_api.list_trusts_for_trustee(self.trustee['id'])
        self.assertEqual(3, len(trusts))
        self.assertEqual(trusts[0]["trustee_user_id"], self.trustee['id'])
        trusts = self.trust_api.list_trusts_for_trustee(self.trustor['id'])
        self.assertEqual(0, len(trusts))

    def test_list_trust_by_trustor(self):
        """Listing by trustor returns only that user's trusts."""
        for i in range(3):
            self.create_sample_trust(uuid.uuid4().hex)
        trusts = self.trust_api.list_trusts_for_trustor(self.trustor['id'])
        self.assertEqual(3, len(trusts))
        self.assertEqual(trusts[0]["trustor_user_id"], self.trustor['id'])
        trusts = self.trust_api.list_trusts_for_trustor(self.trustee['id'])
        self.assertEqual(0, len(trusts))

    def test_list_trusts(self):
        """list_trusts returns every created trust."""
        for i in range(3):
            self.create_sample_trust(uuid.uuid4().hex)
        trusts = self.trust_api.list_trusts()
        self.assertEqual(3, len(trusts))

    def test_trust_has_remaining_uses_positive(self):
        # create a trust with limited uses, check that we have uses left
        trust_data = self.create_sample_trust(uuid.uuid4().hex,
                                              remaining_uses=5)
        self.assertEqual(5, trust_data['remaining_uses'])
        # create a trust with unlimited uses, check that we have uses left
        trust_data = self.create_sample_trust(uuid.uuid4().hex)
        self.assertIsNone(trust_data['remaining_uses'])

    def test_trust_has_remaining_uses_negative(self):
        # try to create a trust with no remaining uses, check that it fails
        self.assertRaises(exception.ValidationError,
                          self.create_sample_trust,
                          uuid.uuid4().hex,
                          remaining_uses=0)
        # try to create a trust with negative remaining uses,
        # check that it fails
        self.assertRaises(exception.ValidationError,
                          self.create_sample_trust,
                          uuid.uuid4().hex,
                          remaining_uses=-12)

    def test_consume_use(self):
        # consume a trust repeatedly until it has no uses anymore
        trust_data = self.create_sample_trust(uuid.uuid4().hex,
                                              remaining_uses=2)
        self.trust_api.consume_use(trust_data['id'])
        t = self.trust_api.get_trust(trust_data['id'])
        self.assertEqual(1, t['remaining_uses'])
        self.trust_api.consume_use(trust_data['id'])
        # This was the last use, the trust isn't available anymore
        self.assertRaises(exception.TrustNotFound,
                          self.trust_api.get_trust,
                          trust_data['id'])
-
-
-class CatalogTests(object):
-
- _legacy_endpoint_id_in_endpoint = False
- _enabled_default_to_true_when_creating_endpoint = False
-
    def test_region_crud(self):
        """Full region lifecycle: create, list, update, cascading delete."""
        # create
        # Max-length (255 char) id exercises the id column boundary.
        region_id = '0' * 255
        new_region = {
            'id': region_id,
            'description': uuid.uuid4().hex,
        }
        res = self.catalog_api.create_region(
            new_region.copy())
        # Ensure that we don't need to have a
        # parent_region_id in the original supplied
        # ref dict, but that it will be returned from
        # the endpoint, with None value.
        expected_region = new_region.copy()
        expected_region['parent_region_id'] = None
        self.assertDictEqual(res, expected_region)

        # Test adding another region with the one above
        # as its parent. We will check below whether deleting
        # the parent successfully deletes any child regions.
        parent_region_id = region_id
        region_id = uuid.uuid4().hex
        new_region = {
            'id': region_id,
            'description': uuid.uuid4().hex,
            'parent_region_id': parent_region_id,
        }
        res = self.catalog_api.create_region(
            new_region.copy())
        self.assertDictEqual(new_region, res)

        # list
        regions = self.catalog_api.list_regions()
        self.assertThat(regions, matchers.HasLength(2))
        region_ids = [x['id'] for x in regions]
        self.assertIn(parent_region_id, region_ids)
        self.assertIn(region_id, region_ids)

        # update
        region_desc_update = {'description': uuid.uuid4().hex}
        res = self.catalog_api.update_region(region_id, region_desc_update)
        expected_region = new_region.copy()
        expected_region['description'] = region_desc_update['description']
        self.assertDictEqual(expected_region, res)

        # delete
        self.catalog_api.delete_region(parent_region_id)
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.delete_region,
                          parent_region_id)
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.get_region,
                          parent_region_id)
        # Ensure the child is also gone...
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.get_region,
                          region_id)
-
- def _create_region_with_parent_id(self, parent_id=None):
- new_region = {
- 'id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'parent_region_id': parent_id
- }
- self.catalog_api.create_region(
- new_region)
- return new_region
-
- def test_list_regions_filtered_by_parent_region_id(self):
- new_region = self._create_region_with_parent_id()
- parent_id = new_region['id']
- new_region = self._create_region_with_parent_id(parent_id)
- new_region = self._create_region_with_parent_id(parent_id)
-
- # filter by parent_region_id
- hints = driver_hints.Hints()
- hints.add_filter('parent_region_id', parent_id)
- regions = self.catalog_api.list_regions(hints)
- for region in regions:
- self.assertEqual(parent_id, region['parent_region_id'])
-
    @unit.skip_if_cache_disabled('catalog')
    def test_cache_layer_region_crud(self):
        """Direct driver writes are invisible until the cache is invalidated."""
        region_id = uuid.uuid4().hex
        new_region = {
            'id': region_id,
            'description': uuid.uuid4().hex,
        }
        self.catalog_api.create_region(new_region.copy())
        updated_region = copy.deepcopy(new_region)
        updated_region['description'] = uuid.uuid4().hex
        # cache the result
        self.catalog_api.get_region(region_id)
        # update the region bypassing catalog_api
        self.catalog_api.driver.update_region(region_id, updated_region)
        # Cached read still shows the pre-update data...
        self.assertDictContainsSubset(new_region,
                                      self.catalog_api.get_region(region_id))
        # ...until we explicitly invalidate the cached entry.
        self.catalog_api.get_region.invalidate(self.catalog_api, region_id)
        self.assertDictContainsSubset(updated_region,
                                      self.catalog_api.get_region(region_id))
        # delete the region
        self.catalog_api.driver.delete_region(region_id)
        # still get the old region
        self.assertDictContainsSubset(updated_region,
                                      self.catalog_api.get_region(region_id))
        self.catalog_api.get_region.invalidate(self.catalog_api, region_id)
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.get_region, region_id)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_invalidate_cache_when_updating_region(self):
- region_id = uuid.uuid4().hex
- new_region = {
- 'id': region_id,
- 'description': uuid.uuid4().hex
- }
- self.catalog_api.create_region(new_region)
-
- # cache the region
- self.catalog_api.get_region(region_id)
-
- # update the region via catalog_api
- new_description = {'description': uuid.uuid4().hex}
- self.catalog_api.update_region(region_id, new_description)
-
- # assert that we can get the new region
- current_region = self.catalog_api.get_region(region_id)
- self.assertEqual(new_description['description'],
- current_region['description'])
-
- def test_create_region_with_duplicate_id(self):
- region_id = uuid.uuid4().hex
- new_region = {
- 'id': region_id,
- 'description': uuid.uuid4().hex
- }
- self.catalog_api.create_region(new_region)
- # Create region again with duplicate id
- self.assertRaises(exception.Conflict,
- self.catalog_api.create_region,
- new_region)
-
- def test_get_region_404(self):
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.get_region,
- uuid.uuid4().hex)
-
- def test_delete_region_404(self):
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.delete_region,
- uuid.uuid4().hex)
-
- def test_create_region_invalid_parent_region_404(self):
- region_id = uuid.uuid4().hex
- new_region = {
- 'id': region_id,
- 'description': uuid.uuid4().hex,
- 'parent_region_id': 'nonexisting'
- }
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.create_region,
- new_region)
-
    def test_avoid_creating_circular_references_in_regions_update(self):
        """Region updates that would create a parent cycle are rejected."""
        region_one = self._create_region_with_parent_id()

        # self circle: region_one->region_one
        self.assertRaises(exception.CircularRegionHierarchyError,
                          self.catalog_api.update_region,
                          region_one['id'],
                          {'parent_region_id': region_one['id']})

        # region_one->region_two->region_one
        region_two = self._create_region_with_parent_id(region_one['id'])
        self.assertRaises(exception.CircularRegionHierarchyError,
                          self.catalog_api.update_region,
                          region_one['id'],
                          {'parent_region_id': region_two['id']})

        # region_one region_two->region_three->region_four->region_two
        region_three = self._create_region_with_parent_id(region_two['id'])
        region_four = self._create_region_with_parent_id(region_three['id'])
        self.assertRaises(exception.CircularRegionHierarchyError,
                          self.catalog_api.update_region,
                          region_two['id'],
                          {'parent_region_id': region_four['id']})
-
    @mock.patch.object(core.CatalogDriverV8,
                       "_ensure_no_circle_in_hierarchical_regions")
    def test_circular_regions_can_be_deleted(self, mock_ensure_on_circle):
        """Deleting any region in a (forced) cycle removes the whole cycle."""
        # turn off the enforcement so that cycles can be created for the test
        mock_ensure_on_circle.return_value = None

        region_one = self._create_region_with_parent_id()

        # self circle: region_one->region_one
        self.catalog_api.update_region(
            region_one['id'],
            {'parent_region_id': region_one['id']})
        self.catalog_api.delete_region(region_one['id'])
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.get_region,
                          region_one['id'])

        # region_one->region_two->region_one
        region_one = self._create_region_with_parent_id()
        region_two = self._create_region_with_parent_id(region_one['id'])
        self.catalog_api.update_region(
            region_one['id'],
            {'parent_region_id': region_two['id']})
        self.catalog_api.delete_region(region_one['id'])
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.get_region,
                          region_one['id'])
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.get_region,
                          region_two['id'])

        # region_one->region_two->region_three->region_one
        region_one = self._create_region_with_parent_id()
        region_two = self._create_region_with_parent_id(region_one['id'])
        region_three = self._create_region_with_parent_id(region_two['id'])
        self.catalog_api.update_region(
            region_one['id'],
            {'parent_region_id': region_three['id']})
        # Deleting a middle member must still take out the whole cycle.
        self.catalog_api.delete_region(region_two['id'])
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.get_region,
                          region_two['id'])
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.get_region,
                          region_one['id'])
        self.assertRaises(exception.RegionNotFound,
                          self.catalog_api.get_region,
                          region_three['id'])
-
    def test_service_crud(self):
        """Full service lifecycle: create, list, update, delete."""
        # create
        service_id = uuid.uuid4().hex
        new_service = {
            'id': service_id,
            'type': uuid.uuid4().hex,
            'name': uuid.uuid4().hex,
            'description': uuid.uuid4().hex,
        }
        res = self.catalog_api.create_service(
            service_id,
            new_service.copy())
        # 'enabled' defaults to True when not supplied.
        new_service['enabled'] = True
        self.assertDictEqual(new_service, res)

        # list
        services = self.catalog_api.list_services()
        self.assertIn(service_id, [x['id'] for x in services])

        # update
        service_name_update = {'name': uuid.uuid4().hex}
        res = self.catalog_api.update_service(service_id, service_name_update)
        expected_service = new_service.copy()
        expected_service['name'] = service_name_update['name']
        self.assertDictEqual(expected_service, res)

        # delete
        self.catalog_api.delete_service(service_id)
        self.assertRaises(exception.ServiceNotFound,
                          self.catalog_api.delete_service,
                          service_id)
        self.assertRaises(exception.ServiceNotFound,
                          self.catalog_api.get_service,
                          service_id)
-
- def _create_random_service(self):
- service_id = uuid.uuid4().hex
- new_service = {
- 'id': service_id,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- return self.catalog_api.create_service(service_id, new_service.copy())
-
    def test_service_filtering(self):
        """Backend consumes 'type' filter hints but leaves 'name' hints."""
        target_service = self._create_random_service()
        unrelated_service1 = self._create_random_service()
        unrelated_service2 = self._create_random_service()

        # filter by type
        hint_for_type = driver_hints.Hints()
        hint_for_type.add_filter(name="type", value=target_service['type'])
        services = self.catalog_api.list_services(hint_for_type)

        self.assertEqual(1, len(services))
        filtered_service = services[0]
        self.assertEqual(target_service['type'], filtered_service['type'])
        self.assertEqual(target_service['id'], filtered_service['id'])

        # filter should have been removed, since it was already used by the
        # backend
        self.assertEqual(0, len(hint_for_type.filters))

        # the backend shouldn't filter by name, since this is handled by the
        # front end
        hint_for_name = driver_hints.Hints()
        hint_for_name.add_filter(name="name", value=target_service['name'])
        services = self.catalog_api.list_services(hint_for_name)

        # All three services come back: the name filter was not applied.
        self.assertEqual(3, len(services))

        # filter should still be there, since it wasn't used by the backend
        self.assertEqual(1, len(hint_for_name.filters))

        self.catalog_api.delete_service(target_service['id'])
        self.catalog_api.delete_service(unrelated_service1['id'])
        self.catalog_api.delete_service(unrelated_service2['id'])
-
- @unit.skip_if_cache_disabled('catalog')
- def test_cache_layer_service_crud(self):
- service_id = uuid.uuid4().hex
- new_service = {
- 'id': service_id,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- res = self.catalog_api.create_service(
- service_id,
- new_service.copy())
- new_service['enabled'] = True
- self.assertDictEqual(new_service, res)
- self.catalog_api.get_service(service_id)
- updated_service = copy.deepcopy(new_service)
- updated_service['description'] = uuid.uuid4().hex
- # update bypassing catalog api
- self.catalog_api.driver.update_service(service_id, updated_service)
- self.assertDictContainsSubset(new_service,
- self.catalog_api.get_service(service_id))
- self.catalog_api.get_service.invalidate(self.catalog_api, service_id)
- self.assertDictContainsSubset(updated_service,
- self.catalog_api.get_service(service_id))
-
- # delete bypassing catalog api
- self.catalog_api.driver.delete_service(service_id)
- self.assertDictContainsSubset(updated_service,
- self.catalog_api.get_service(service_id))
- self.catalog_api.get_service.invalidate(self.catalog_api, service_id)
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.delete_service,
- service_id)
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.get_service,
- service_id)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_invalidate_cache_when_updating_service(self):
- service_id = uuid.uuid4().hex
- new_service = {
- 'id': service_id,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(
- service_id,
- new_service.copy())
-
- # cache the service
- self.catalog_api.get_service(service_id)
-
- # update the service via catalog api
- new_type = {'type': uuid.uuid4().hex}
- self.catalog_api.update_service(service_id, new_type)
-
- # assert that we can get the new service
- current_service = self.catalog_api.get_service(service_id)
- self.assertEqual(new_type['type'], current_service['type'])
-
- def test_delete_service_with_endpoint(self):
- # create a service
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service['id'], service)
-
- # create an endpoint attached to the service
- endpoint = {
- 'id': uuid.uuid4().hex,
- 'region': uuid.uuid4().hex,
- 'interface': uuid.uuid4().hex[:8],
- 'url': uuid.uuid4().hex,
- 'service_id': service['id'],
- }
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
-
- # deleting the service should also delete the endpoint
- self.catalog_api.delete_service(service['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- endpoint['id'])
-
- def test_cache_layer_delete_service_with_endpoint(self):
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service['id'], service)
-
- # create an endpoint attached to the service
- endpoint = {
- 'id': uuid.uuid4().hex,
- 'region_id': None,
- 'interface': uuid.uuid4().hex[:8],
- 'url': uuid.uuid4().hex,
- 'service_id': service['id'],
- }
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
- # cache the result
- self.catalog_api.get_service(service['id'])
- self.catalog_api.get_endpoint(endpoint['id'])
- # delete the service bypassing catalog api
- self.catalog_api.driver.delete_service(service['id'])
- self.assertDictContainsSubset(endpoint,
- self.catalog_api.
- get_endpoint(endpoint['id']))
- self.assertDictContainsSubset(service,
- self.catalog_api.
- get_service(service['id']))
- self.catalog_api.get_endpoint.invalidate(self.catalog_api,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- endpoint['id'])
- # multiple endpoints associated with a service
- second_endpoint = {
- 'id': uuid.uuid4().hex,
- 'region_id': None,
- 'interface': uuid.uuid4().hex[:8],
- 'url': uuid.uuid4().hex,
- 'service_id': service['id'],
- }
- self.catalog_api.create_service(service['id'], service)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
- self.catalog_api.create_endpoint(second_endpoint['id'],
- second_endpoint)
- self.catalog_api.delete_service(service['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- second_endpoint['id'])
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- second_endpoint['id'])
-
- def test_get_service_404(self):
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.get_service,
- uuid.uuid4().hex)
-
- def test_delete_service_404(self):
- self.assertRaises(exception.ServiceNotFound,
- self.catalog_api.delete_service,
- uuid.uuid4().hex)
-
- def test_create_endpoint_nonexistent_service(self):
- endpoint = {
- 'id': uuid.uuid4().hex,
- 'service_id': uuid.uuid4().hex,
- }
- self.assertRaises(exception.ValidationError,
- self.catalog_api.create_endpoint,
- endpoint['id'],
- endpoint)
-
- def test_update_endpoint_nonexistent_service(self):
- dummy_service, enabled_endpoint, dummy_disabled_endpoint = (
- self._create_endpoints())
- new_endpoint = {
- 'service_id': uuid.uuid4().hex,
- }
- self.assertRaises(exception.ValidationError,
- self.catalog_api.update_endpoint,
- enabled_endpoint['id'],
- new_endpoint)
-
- def test_create_endpoint_nonexistent_region(self):
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service['id'], service.copy())
-
- endpoint = {
- 'id': uuid.uuid4().hex,
- 'service_id': service['id'],
- 'interface': 'public',
- 'url': uuid.uuid4().hex,
- 'region_id': uuid.uuid4().hex,
- }
- self.assertRaises(exception.ValidationError,
- self.catalog_api.create_endpoint,
- endpoint['id'],
- endpoint)
-
- def test_update_endpoint_nonexistent_region(self):
- dummy_service, enabled_endpoint, dummy_disabled_endpoint = (
- self._create_endpoints())
- new_endpoint = {
- 'region_id': uuid.uuid4().hex,
- }
- self.assertRaises(exception.ValidationError,
- self.catalog_api.update_endpoint,
- enabled_endpoint['id'],
- new_endpoint)
-
- def test_get_endpoint_404(self):
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.get_endpoint,
- uuid.uuid4().hex)
-
- def test_delete_endpoint_404(self):
- self.assertRaises(exception.EndpointNotFound,
- self.catalog_api.delete_endpoint,
- uuid.uuid4().hex)
-
- def test_create_endpoint(self):
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service['id'], service.copy())
-
- endpoint = {
- 'id': uuid.uuid4().hex,
- 'region_id': None,
- 'service_id': service['id'],
- 'interface': 'public',
- 'url': uuid.uuid4().hex,
- }
- self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
-
- def test_update_endpoint(self):
- dummy_service_ref, endpoint_ref, dummy_disabled_endpoint_ref = (
- self._create_endpoints())
- res = self.catalog_api.update_endpoint(endpoint_ref['id'],
- {'interface': 'private'})
- expected_endpoint = endpoint_ref.copy()
- expected_endpoint['interface'] = 'private'
- if self._legacy_endpoint_id_in_endpoint:
- expected_endpoint['legacy_endpoint_id'] = None
- if self._enabled_default_to_true_when_creating_endpoint:
- expected_endpoint['enabled'] = True
- self.assertDictEqual(expected_endpoint, res)
-
- def _create_endpoints(self):
- # Creates a service and 2 endpoints for the service in the same region.
- # The 'public' interface is enabled and the 'internal' interface is
- # disabled.
-
- def create_endpoint(service_id, region, **kwargs):
- id_ = uuid.uuid4().hex
- ref = {
- 'id': id_,
- 'interface': 'public',
- 'region_id': region,
- 'service_id': service_id,
- 'url': 'http://localhost/%s' % uuid.uuid4().hex,
- }
- ref.update(kwargs)
- self.catalog_api.create_endpoint(id_, ref)
- return ref
-
- # Create a service for use with the endpoints.
- service_id = uuid.uuid4().hex
- service_ref = {
- 'id': service_id,
- 'name': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service_id, service_ref)
-
- region = {'id': uuid.uuid4().hex}
- self.catalog_api.create_region(region)
-
- # Create endpoints
- enabled_endpoint_ref = create_endpoint(service_id, region['id'])
- disabled_endpoint_ref = create_endpoint(
- service_id, region['id'], enabled=False, interface='internal')
-
- return service_ref, enabled_endpoint_ref, disabled_endpoint_ref
-
- def test_list_endpoints(self):
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service['id'], service.copy())
-
- expected_ids = set([uuid.uuid4().hex for _ in range(3)])
- for endpoint_id in expected_ids:
- endpoint = {
- 'id': endpoint_id,
- 'region_id': None,
- 'service_id': service['id'],
- 'interface': 'public',
- 'url': uuid.uuid4().hex,
- }
- self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
-
- endpoints = self.catalog_api.list_endpoints()
- self.assertEqual(expected_ids, set(e['id'] for e in endpoints))
-
- def test_get_catalog_endpoint_disabled(self):
- """Get back only enabled endpoints when get the v2 catalog."""
-
- service_ref, enabled_endpoint_ref, dummy_disabled_endpoint_ref = (
- self._create_endpoints())
-
- user_id = uuid.uuid4().hex
- project_id = uuid.uuid4().hex
- catalog = self.catalog_api.get_catalog(user_id, project_id)
-
- exp_entry = {
- 'id': enabled_endpoint_ref['id'],
- 'name': service_ref['name'],
- 'publicURL': enabled_endpoint_ref['url'],
- }
-
- region = enabled_endpoint_ref['region_id']
- self.assertEqual(exp_entry, catalog[region][service_ref['type']])
-
- def test_get_v3_catalog_endpoint_disabled(self):
- """Get back only enabled endpoints when get the v3 catalog."""
-
- enabled_endpoint_ref = self._create_endpoints()[1]
-
- user_id = uuid.uuid4().hex
- project_id = uuid.uuid4().hex
- catalog = self.catalog_api.get_v3_catalog(user_id, project_id)
-
- endpoint_ids = [x['id'] for x in catalog[0]['endpoints']]
- self.assertEqual([enabled_endpoint_ref['id']], endpoint_ids)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_invalidate_cache_when_updating_endpoint(self):
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service['id'], service)
-
- # create an endpoint attached to the service
- endpoint_id = uuid.uuid4().hex
- endpoint = {
- 'id': endpoint_id,
- 'region_id': None,
- 'interface': uuid.uuid4().hex[:8],
- 'url': uuid.uuid4().hex,
- 'service_id': service['id'],
- }
- self.catalog_api.create_endpoint(endpoint_id, endpoint)
-
- # cache the endpoint
- self.catalog_api.get_endpoint(endpoint_id)
-
- # update the endpoint via catalog api
- new_url = {'url': uuid.uuid4().hex}
- self.catalog_api.update_endpoint(endpoint_id, new_url)
-
- # assert that we can get the new endpoint
- current_endpoint = self.catalog_api.get_endpoint(endpoint_id)
- self.assertEqual(new_url['url'], current_endpoint['url'])
-
-
class PolicyTests(object):
    """CRUD tests for the policy backend, mixed into backend test cases."""

    def _new_policy_ref(self):
        # Build a policy ref with a random value for every attribute.
        return {attr: uuid.uuid4().hex
                for attr in ('id', 'policy', 'type', 'endpoint_id')}

    def assertEqualPolicies(self, a, b):
        # Compare only the attributes the backend must round-trip.
        for attr in ('id', 'endpoint_id', 'policy', 'type'):
            self.assertEqual(a[attr], b[attr])

    def test_create(self):
        ref = self._new_policy_ref()
        created = self.policy_api.create_policy(ref['id'], ref)
        self.assertEqualPolicies(ref, created)

    def test_get(self):
        ref = self._new_policy_ref()
        self.policy_api.create_policy(ref['id'], ref)

        fetched = self.policy_api.get_policy(ref['id'])
        self.assertEqualPolicies(ref, fetched)

    def test_list(self):
        ref = self._new_policy_ref()
        self.policy_api.create_policy(ref['id'], ref)

        matches = [p for p in self.policy_api.list_policies()
                   if p['id'] == ref['id']]
        self.assertEqualPolicies(ref, matches[0])

    def test_update(self):
        orig = self._new_policy_ref()
        self.policy_api.create_policy(orig['id'], orig)

        replacement = self._new_policy_ref()

        # The policy id is immutable; updating with a new one must fail.
        self.assertRaises(exception.ValidationError,
                          self.policy_api.update_policy,
                          orig['id'],
                          replacement)

        replacement['id'] = orig['id']
        updated = self.policy_api.update_policy(orig['id'], replacement)
        self.assertEqualPolicies(replacement, updated)

    def test_delete(self):
        ref = self._new_policy_ref()
        self.policy_api.create_policy(ref['id'], ref)

        self.policy_api.delete_policy(ref['id'])
        self.assertRaises(exception.PolicyNotFound,
                          self.policy_api.delete_policy,
                          ref['id'])
        self.assertRaises(exception.PolicyNotFound,
                          self.policy_api.get_policy,
                          ref['id'])
        remaining = [p for p in self.policy_api.list_policies()
                     if p['id'] == ref['id']]
        self.assertFalse(len(remaining))

    def test_get_policy_404(self):
        self.assertRaises(exception.PolicyNotFound,
                          self.policy_api.get_policy,
                          uuid.uuid4().hex)

    def test_update_policy_404(self):
        ref = self._new_policy_ref()
        self.assertRaises(exception.PolicyNotFound,
                          self.policy_api.update_policy,
                          ref['id'],
                          ref)

    def test_delete_policy_404(self):
        self.assertRaises(exception.PolicyNotFound,
                          self.policy_api.delete_policy,
                          uuid.uuid4().hex)
-
-
-class InheritanceTests(AssignmentTestHelperMixin):
-
    def test_role_assignments_user_domain_to_project_inheritance(self):
        """Check a domain-inherited role expands onto the domain's project.

        user[0] holds a direct domain role, a direct project role and a
        domain role inherited to projects; user[1] has only a direct
        project role and must never appear in user[0]'s listings.
        """
        test_plan = {
            'entities': {'domains': {'users': 2, 'projects': 1},
                         'roles': 3},
            'assignments': [{'user': 0, 'role': 0, 'domain': 0},
                            {'user': 0, 'role': 1, 'project': 0},
                            {'user': 0, 'role': 2, 'domain': 0,
                             'inherited_to_projects': True},
                            {'user': 1, 'role': 1, 'project': 0}],
            'tests': [
                # List all direct assignments for user[0]
                {'params': {'user': 0},
                 'results': [{'user': 0, 'role': 0, 'domain': 0},
                             {'user': 0, 'role': 1, 'project': 0},
                             {'user': 0, 'role': 2, 'domain': 0,
                              'inherited_to_projects': 'projects'}]},
                # Now the effective ones - so the domain role should turn into
                # a project role
                {'params': {'user': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'domain': 0},
                             {'user': 0, 'role': 1, 'project': 0},
                             {'user': 0, 'role': 2, 'project': 0,
                              'indirect': {'domain': 0}}]},
                # Narrow down to effective roles for user[0] and project[0]
                {'params': {'user': 0, 'project': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 1, 'project': 0},
                             {'user': 0, 'role': 2, 'project': 0,
                              'indirect': {'domain': 0}}]}
            ]
        }
        # Inheritance only applies while the os_inherit extension is enabled.
        self.config_fixture.config(group='os_inherit', enabled=True)
        self.execute_assignment_test_plan(test_plan)
-
    def test_inherited_role_assignments_excluded_if_os_inherit_false(self):
        """Inherited assignments are ignored while os_inherit is disabled.

        With the OS-INHERIT extension turned off, an assignment created
        with inherited_to_projects must be neither listed nor expanded,
        while group-membership expansion still happens in effective mode.
        """
        test_plan = {
            'entities': {'domains': {'users': 2, 'groups': 1, 'projects': 1},
                         'roles': 4},
            'group_memberships': [{'group': 0, 'users': [0]}],
            'assignments': [{'user': 0, 'role': 0, 'domain': 0},
                            {'user': 0, 'role': 1, 'project': 0},
                            {'user': 0, 'role': 2, 'domain': 0,
                             'inherited_to_projects': True},
                            {'user': 1, 'role': 1, 'project': 0},
                            {'group': 0, 'role': 3, 'project': 0}],
            'tests': [
                # List all direct assignments for user[0], since os-inherit is
                # disabled, we should not see the inherited role
                {'params': {'user': 0},
                 'results': [{'user': 0, 'role': 0, 'domain': 0},
                             {'user': 0, 'role': 1, 'project': 0}]},
                # Same in effective mode - inherited roles should not be
                # included or expanded...but the group role should now
                # turn up as a user role, since group expansion is not
                # part of os-inherit.
                {'params': {'user': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'domain': 0},
                             {'user': 0, 'role': 1, 'project': 0},
                             {'user': 0, 'role': 3, 'project': 0,
                              'indirect': {'group': 0}}]},
            ]
        }
        self.config_fixture.config(group='os_inherit', enabled=False)
        self.execute_assignment_test_plan(test_plan)
-
- def _test_crud_inherited_and_direct_assignment(self, **kwargs):
- """Tests inherited and direct assignments for the actor and target
-
- Ensure it is possible to create both inherited and direct role
- assignments for the same actor on the same target. The actor and the
- target are specified in the kwargs as ('user_id' or 'group_id') and
- ('project_id' or 'domain_id'), respectively.
-
- """
-
- # Create a new role to avoid assignments loaded from default fixtures
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- role = self.role_api.create_role(role['id'], role)
-
- # Define the common assigment entity
- assignment_entity = {'role_id': role['id']}
- assignment_entity.update(kwargs)
-
- # Define assignments under test
- direct_assignment_entity = assignment_entity.copy()
- inherited_assignment_entity = assignment_entity.copy()
- inherited_assignment_entity['inherited_to_projects'] = 'projects'
-
- # Create direct assignment and check grants
- self.assignment_api.create_grant(inherited_to_projects=False,
- **assignment_entity)
-
- grants = self.assignment_api.list_role_assignments_for_role(role['id'])
- self.assertThat(grants, matchers.HasLength(1))
- self.assertIn(direct_assignment_entity, grants)
-
- # Now add inherited assignment and check grants
- self.assignment_api.create_grant(inherited_to_projects=True,
- **assignment_entity)
-
- grants = self.assignment_api.list_role_assignments_for_role(role['id'])
- self.assertThat(grants, matchers.HasLength(2))
- self.assertIn(direct_assignment_entity, grants)
- self.assertIn(inherited_assignment_entity, grants)
-
- # Delete both and check grants
- self.assignment_api.delete_grant(inherited_to_projects=False,
- **assignment_entity)
- self.assignment_api.delete_grant(inherited_to_projects=True,
- **assignment_entity)
-
- grants = self.assignment_api.list_role_assignments_for_role(role['id'])
- self.assertEqual([], grants)
-
- def test_crud_inherited_and_direct_assignment_for_user_on_domain(self):
- self._test_crud_inherited_and_direct_assignment(
- user_id=self.user_foo['id'], domain_id=DEFAULT_DOMAIN_ID)
-
- def test_crud_inherited_and_direct_assignment_for_group_on_domain(self):
- group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID}
- group = self.identity_api.create_group(group)
-
- self._test_crud_inherited_and_direct_assignment(
- group_id=group['id'], domain_id=DEFAULT_DOMAIN_ID)
-
- def test_crud_inherited_and_direct_assignment_for_user_on_project(self):
- self._test_crud_inherited_and_direct_assignment(
- user_id=self.user_foo['id'], project_id=self.tenant_baz['id'])
-
- def test_crud_inherited_and_direct_assignment_for_group_on_project(self):
- group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID}
- group = self.identity_api.create_group(group)
-
- self._test_crud_inherited_and_direct_assignment(
- group_id=group['id'], project_id=self.tenant_baz['id'])
-
    def test_inherited_role_grants_for_user(self):
        """Test inherited user roles.

        Test Plan:

        - Enable OS-INHERIT extension
        - Create 3 roles
        - Create a domain, with a project and a user
        - Check no roles yet exist
        - Assign a direct user role to the project and a (non-inherited)
          user role to the domain
        - Get a list of effective roles - should only get the one direct role
        - Now add an inherited user role to the domain
        - Get a list of effective roles - should have two roles, one
          direct and one by virtue of the inherited user role
        - Also get effective roles for the domain - the role marked as
          inherited should not show up

        """
        self.config_fixture.config(group='os_inherit', enabled=True)
        role_list = []
        for _ in range(3):
            role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
            self.role_api.create_role(role['id'], role)
            role_list.append(role)
        domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(domain1['id'], domain1)
        user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
                 'password': uuid.uuid4().hex, 'enabled': True}
        user1 = self.identity_api.create_user(user1)
        project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
                    'domain_id': domain1['id']}
        self.resource_api.create_project(project1['id'], project1)

        # No grants exist yet for the new user on the new project.
        roles_ref = self.assignment_api.list_grants(
            user_id=user1['id'],
            project_id=project1['id'])
        self.assertEqual(0, len(roles_ref))

        # Create the first two roles - the domain one is not inherited
        self.assignment_api.create_grant(user_id=user1['id'],
                                         project_id=project1['id'],
                                         role_id=role_list[0]['id'])
        self.assignment_api.create_grant(user_id=user1['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[1]['id'])

        # Now get the effective roles for the user and project, this
        # should only include the direct role assignment on the project
        combined_list = self.assignment_api.get_roles_for_user_and_project(
            user1['id'], project1['id'])
        self.assertEqual(1, len(combined_list))
        self.assertIn(role_list[0]['id'], combined_list)

        # Now add an inherited role on the domain
        self.assignment_api.create_grant(user_id=user1['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[2]['id'],
                                         inherited_to_projects=True)

        # Now get the effective roles for the user and project again, this
        # should now include the inherited role on the domain
        combined_list = self.assignment_api.get_roles_for_user_and_project(
            user1['id'], project1['id'])
        self.assertEqual(2, len(combined_list))
        self.assertIn(role_list[0]['id'], combined_list)
        self.assertIn(role_list[2]['id'], combined_list)

        # Finally, check that the inherited role does not appear as a valid
        # directly assigned role on the domain itself
        combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
            user1['id'], domain1['id'])
        self.assertEqual(1, len(combined_role_list))
        self.assertIn(role_list[1]['id'], combined_role_list)

        # TODO(henry-nash): The test above uses get_roles_for_user_and_project
        # and get_roles_for_user_and_domain, which will, in a subsequent patch,
        # be re-implemented to simply call list_role_assignments (see blueprint
        # remove-role-metadata).
        #
        # The test plan below therefore mirrors this test, to ensure that
        # list_role_assignments works the same. Once get_roles_for_user_and
        # project/domain have been re-implemented then the manual tests above
        # can be refactored to simply ensure it gives the same answers.
        test_plan = {
            # A domain with a user & project, plus 3 roles.
            'entities': {'domains': {'users': 1, 'projects': 1},
                         'roles': 3},
            'assignments': [{'user': 0, 'role': 0, 'project': 0},
                            {'user': 0, 'role': 1, 'domain': 0},
                            {'user': 0, 'role': 2, 'domain': 0,
                             'inherited_to_projects': True}],
            'tests': [
                # List all effective assignments for user[0] on project[0].
                # Should get one direct role and one inherited role.
                {'params': {'user': 0, 'project': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0},
                             {'user': 0, 'role': 2, 'project': 0,
                              'indirect': {'domain': 0}}]},
                # Ensure effective mode on the domain does not list the
                # inherited role on that domain
                {'params': {'user': 0, 'domain': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 1, 'domain': 0}]},
                # Ensure non-inherited mode also only returns the non-inherited
                # role on the domain
                {'params': {'user': 0, 'domain': 0, 'inherited': False},
                 'results': [{'user': 0, 'role': 1, 'domain': 0}]},
            ]
        }
        self.execute_assignment_test_plan(test_plan)
-
    def test_inherited_role_grants_for_group(self):
        """Test inherited group roles.

        Test Plan:

        - Enable OS-INHERIT extension
        - Create 4 roles
        - Create a domain, with a project, user and two groups
        - Make the user a member of both groups
        - Check no roles yet exist
        - Assign a direct user role to the project and a (non-inherited)
          group role on the domain
        - Get a list of effective roles - should only get the one direct role
        - Now add two inherited group roles to the domain
        - Get a list of effective roles - should have three roles, one
          direct and two by virtue of inherited group roles

        """
        self.config_fixture.config(group='os_inherit', enabled=True)
        role_list = []
        for _ in range(4):
            role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
            self.role_api.create_role(role['id'], role)
            role_list.append(role)
        domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(domain1['id'], domain1)
        user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
                 'password': uuid.uuid4().hex, 'enabled': True}
        user1 = self.identity_api.create_user(user1)
        group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
                  'enabled': True}
        group1 = self.identity_api.create_group(group1)
        group2 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'],
                  'enabled': True}
        group2 = self.identity_api.create_group(group2)
        project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
                    'domain_id': domain1['id']}
        self.resource_api.create_project(project1['id'], project1)

        # The user belongs to both groups.
        self.identity_api.add_user_to_group(user1['id'],
                                            group1['id'])
        self.identity_api.add_user_to_group(user1['id'],
                                            group2['id'])

        # No grants exist yet for the new user on the new project.
        roles_ref = self.assignment_api.list_grants(
            user_id=user1['id'],
            project_id=project1['id'])
        self.assertEqual(0, len(roles_ref))

        # Create two roles - the domain one is not inherited
        self.assignment_api.create_grant(user_id=user1['id'],
                                         project_id=project1['id'],
                                         role_id=role_list[0]['id'])
        self.assignment_api.create_grant(group_id=group1['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[1]['id'])

        # Now get the effective roles for the user and project, this
        # should only include the direct role assignment on the project
        combined_list = self.assignment_api.get_roles_for_user_and_project(
            user1['id'], project1['id'])
        self.assertEqual(1, len(combined_list))
        self.assertIn(role_list[0]['id'], combined_list)

        # Now add two more group roles, both inherited, to the domain
        self.assignment_api.create_grant(group_id=group2['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[2]['id'],
                                         inherited_to_projects=True)
        self.assignment_api.create_grant(group_id=group2['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[3]['id'],
                                         inherited_to_projects=True)

        # Now get the effective roles for the user and project again, this
        # should now include the inherited roles on the domain
        combined_list = self.assignment_api.get_roles_for_user_and_project(
            user1['id'], project1['id'])
        self.assertEqual(3, len(combined_list))
        self.assertIn(role_list[0]['id'], combined_list)
        self.assertIn(role_list[2]['id'], combined_list)
        self.assertIn(role_list[3]['id'], combined_list)

        # TODO(henry-nash): The test above uses get_roles_for_user_and_project
        # which will, in a subsequent patch, be re-implemented to simply call
        # list_role_assignments (see blueprint remove-role-metadata).
        #
        # The test plan below therefore mirrors this test, to ensure that
        # list_role_assignments works the same. Once
        # get_roles_for_user_and_project has been re-implemented then the
        # manual tests above can be refactored to simply ensure it gives
        # the same answers.
        test_plan = {
            # A domain with a user and project, 2 groups, plus 4 roles.
            'entities': {'domains': {'users': 1, 'projects': 1, 'groups': 2},
                         'roles': 4},
            'group_memberships': [{'group': 0, 'users': [0]},
                                  {'group': 1, 'users': [0]}],
            'assignments': [{'user': 0, 'role': 0, 'project': 0},
                            {'group': 0, 'role': 1, 'domain': 0},
                            {'group': 1, 'role': 2, 'domain': 0,
                             'inherited_to_projects': True},
                            {'group': 1, 'role': 3, 'domain': 0,
                             'inherited_to_projects': True}],
            'tests': [
                # List all effective assignments for user[0] on project[0].
                # Should get one direct role and both inherited roles, but
                # not the direct one on domain[0], even though user[0] is
                # in group[0].
                {'params': {'user': 0, 'project': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0},
                             {'user': 0, 'role': 2, 'project': 0,
                              'indirect': {'domain': 0, 'group': 1}},
                             {'user': 0, 'role': 3, 'project': 0,
                              'indirect': {'domain': 0, 'group': 1}}]}
            ]
        }
        self.execute_assignment_test_plan(test_plan)
-
    def test_list_projects_for_user_with_inherited_grants(self):
        """Test inherited user roles.

        Test Plan:

        - Enable OS-INHERIT extension
        - Create a domain, with two projects and a user
        - Assign an inherited user role on the domain, as well as a direct
          user role to a separate project in a different domain
        - Get a list of projects for user, should return all three projects

        """
        self.config_fixture.config(group='os_inherit', enabled=True)
        domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.resource_api.create_domain(domain['id'], domain)
        user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
                 'domain_id': domain['id'], 'enabled': True}
        user1 = self.identity_api.create_user(user1)
        project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
                    'domain_id': domain['id']}
        self.resource_api.create_project(project1['id'], project1)
        project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
                    'domain_id': domain['id']}
        self.resource_api.create_project(project2['id'], project2)

        # Create 2 grants, one on a project and one inherited grant
        # on the domain
        self.assignment_api.create_grant(user_id=user1['id'],
                                         project_id=self.tenant_bar['id'],
                                         role_id=self.role_member['id'])
        self.assignment_api.create_grant(user_id=user1['id'],
                                         domain_id=domain['id'],
                                         role_id=self.role_admin['id'],
                                         inherited_to_projects=True)
        # Should get back all three projects, one by virtue of the direct
        # grant, plus both projects in the domain
        user_projects = self.assignment_api.list_projects_for_user(user1['id'])
        self.assertEqual(3, len(user_projects))

        # TODO(henry-nash): The test above uses list_projects_for_user
        # which may, in a subsequent patch, be re-implemented to call
        # list_role_assignments and then report only the distinct projects.
        #
        # The test plan below therefore mirrors this test, to ensure that
        # list_role_assignments works the same. Once list_projects_for_user
        # has been re-implemented then the manual tests above can be
        # refactored.
        test_plan = {
            # A domain with 1 project, plus a second domain with 2 projects,
            # as well as a user. Also, create 2 roles.
            'entities': {'domains': [{'projects': 1},
                                     {'users': 1, 'projects': 2}],
                         'roles': 2},
            'assignments': [{'user': 0, 'role': 0, 'project': 0},
                            {'user': 0, 'role': 1, 'domain': 1,
                             'inherited_to_projects': True}],
            'tests': [
                # List all effective assignments for user[0]
                # Should get one direct role plus one inherited role for each
                # project in domain
                {'params': {'user': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0},
                             {'user': 0, 'role': 1, 'project': 1,
                              'indirect': {'domain': 1}},
                             {'user': 0, 'role': 1, 'project': 2,
                              'indirect': {'domain': 1}}]}
            ]
        }
        self.execute_assignment_test_plan(test_plan)
-
- def test_list_projects_for_user_with_inherited_user_project_grants(self):
- """Test inherited role assignments for users on nested projects.
-
- Test Plan:
-
- - Enable OS-INHERIT extension
- - Create a hierarchy of projects with one root and one leaf project
- - Assign an inherited user role on root project
- - Assign a non-inherited user role on root project
- - Get a list of projects for user, should return both projects
- - Disable OS-INHERIT extension
- - Get a list of projects for user, should return only root project
-
- """
- # Enable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=True)
- root_project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None,
- 'is_domain': False}
- self.resource_api.create_project(root_project['id'], root_project)
- leaf_project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': root_project['id'],
- 'is_domain': False}
- self.resource_api.create_project(leaf_project['id'], leaf_project)
-
- user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True}
- user = self.identity_api.create_user(user)
-
- # Grant inherited user role
- self.assignment_api.create_grant(user_id=user['id'],
- project_id=root_project['id'],
- role_id=self.role_admin['id'],
- inherited_to_projects=True)
- # Grant non-inherited user role
- self.assignment_api.create_grant(user_id=user['id'],
- project_id=root_project['id'],
- role_id=self.role_member['id'])
- # Should get back both projects: because the direct role assignment for
- # the root project and inherited role assignment for leaf project
- user_projects = self.assignment_api.list_projects_for_user(user['id'])
- self.assertEqual(2, len(user_projects))
- self.assertIn(root_project, user_projects)
- self.assertIn(leaf_project, user_projects)
-
- # Disable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=False)
- # Should get back just root project - due the direct role assignment
- user_projects = self.assignment_api.list_projects_for_user(user['id'])
- self.assertEqual(1, len(user_projects))
- self.assertIn(root_project, user_projects)
-
- # TODO(henry-nash): The test above uses list_projects_for_user
- # which may, in a subsequent patch, be re-implemeted to call
- # list_role_assignments and then report only the distinct projects.
- #
- # The test plan below therefore mirrors this test, to ensure that
- # list_role_assignments works the same. Once list_projects_for_user
- # has been re-implemented then the manual tests above can be
- # refactored.
- test_plan = {
- # A domain with a project and sub-project, plus a user.
- # Also, create 2 roles.
- 'entities': {
- 'domains': {'id': DEFAULT_DOMAIN_ID, 'users': 1,
- 'projects': {'project': 1}},
- 'roles': 2},
- # A direct role and an inherited role on the parent
- 'assignments': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 1, 'project': 0,
- 'inherited_to_projects': True}],
- 'tests': [
- # List all effective assignments for user[0] - should get back
- # one direct role plus one inherited role.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 1, 'project': 1,
- 'indirect': {'project': 0}}]}
- ]
- }
-
- test_plan_with_os_inherit_disabled = {
- 'tests': [
- # List all effective assignments for user[0] - should only get
- # back the one direct role.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0}]}
- ]
- }
- self.config_fixture.config(group='os_inherit', enabled=True)
- test_data = self.execute_assignment_test_plan(test_plan)
- self.config_fixture.config(group='os_inherit', enabled=False)
- # Pass the existing test data in to allow execution of 2nd test plan
- self.execute_assignment_tests(
- test_plan_with_os_inherit_disabled, test_data)
-
- def test_list_projects_for_user_with_inherited_group_grants(self):
- """Test inherited group roles.
-
- Test Plan:
-
- - Enable OS-INHERIT extension
- - Create two domains, each with two projects
- - Create a user and group
- - Make the user a member of the group
- - Assign a user role two projects, an inherited
- group role to one domain and an inherited regular role on
- the other domain
- - Get a list of projects for user, should return both pairs of projects
- from the domain, plus the one separate project
-
- """
- self.config_fixture.config(group='os_inherit', enabled=True)
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain['id'], domain)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(domain2['id'], domain2)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
- self.resource_api.create_project(project1['id'], project1)
- project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
- self.resource_api.create_project(project2['id'], project2)
- project3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain2['id']}
- self.resource_api.create_project(project3['id'], project3)
- project4 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain2['id']}
- self.resource_api.create_project(project4['id'], project4)
- user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': domain['id'], 'enabled': True}
- user1 = self.identity_api.create_user(user1)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
- group1 = self.identity_api.create_group(group1)
- self.identity_api.add_user_to_group(user1['id'], group1['id'])
-
- # Create 4 grants:
- # - one user grant on a project in domain2
- # - one user grant on a project in the default domain
- # - one inherited user grant on domain
- # - one inherited group grant on domain2
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=project3['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(user_id=user1['id'],
- domain_id=domain['id'],
- role_id=self.role_admin['id'],
- inherited_to_projects=True)
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=domain2['id'],
- role_id=self.role_admin['id'],
- inherited_to_projects=True)
- # Should get back all five projects, but without a duplicate for
- # project3 (since it has both a direct user role and an inherited role)
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertEqual(5, len(user_projects))
-
- # TODO(henry-nash): The test above uses list_projects_for_user
- # which may, in a subsequent patch, be re-implemeted to call
- # list_role_assignments and then report only the distinct projects.
- #
- # The test plan below therefore mirrors this test, to ensure that
- # list_role_assignments works the same. Once list_projects_for_user
- # has been re-implemented then the manual tests above can be
- # refactored.
- test_plan = {
- # A domain with a 1 project, plus a second domain with 2 projects,
- # as well as a user & group and a 3rd domain with 2 projects.
- # Also, created 2 roles.
- 'entities': {'domains': [{'projects': 1},
- {'users': 1, 'groups': 1, 'projects': 2},
- {'projects': 2}],
- 'roles': 2},
- 'group_memberships': [{'group': 0, 'users': [0]}],
- 'assignments': [{'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 0, 'project': 3},
- {'user': 0, 'role': 1, 'domain': 1,
- 'inherited_to_projects': True},
- {'user': 0, 'role': 1, 'domain': 2,
- 'inherited_to_projects': True}],
- 'tests': [
- # List all effective assignments for user[0]
- # Should get back both direct roles plus roles on both projects
- # from each domain. Duplicates should not be fitered out.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 3},
- {'user': 0, 'role': 0, 'project': 0},
- {'user': 0, 'role': 1, 'project': 1,
- 'indirect': {'domain': 1}},
- {'user': 0, 'role': 1, 'project': 2,
- 'indirect': {'domain': 1}},
- {'user': 0, 'role': 1, 'project': 3,
- 'indirect': {'domain': 2}},
- {'user': 0, 'role': 1, 'project': 4,
- 'indirect': {'domain': 2}}]}
- ]
- }
- self.execute_assignment_test_plan(test_plan)
-
- def test_list_projects_for_user_with_inherited_group_project_grants(self):
- """Test inherited role assignments for groups on nested projects.
-
- Test Plan:
-
- - Enable OS-INHERIT extension
- - Create a hierarchy of projects with one root and one leaf project
- - Assign an inherited group role on root project
- - Assign a non-inherited group role on root project
- - Get a list of projects for user, should return both projects
- - Disable OS-INHERIT extension
- - Get a list of projects for user, should return only root project
-
- """
- self.config_fixture.config(group='os_inherit', enabled=True)
- root_project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': None,
- 'is_domain': False}
- self.resource_api.create_project(root_project['id'], root_project)
- leaf_project = {'id': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'enabled': True,
- 'name': uuid.uuid4().hex,
- 'parent_id': root_project['id'],
- 'is_domain': False}
- self.resource_api.create_project(leaf_project['id'], leaf_project)
-
- user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True}
- user = self.identity_api.create_user(user)
-
- group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID}
- group = self.identity_api.create_group(group)
- self.identity_api.add_user_to_group(user['id'], group['id'])
-
- # Grant inherited group role
- self.assignment_api.create_grant(group_id=group['id'],
- project_id=root_project['id'],
- role_id=self.role_admin['id'],
- inherited_to_projects=True)
- # Grant non-inherited group role
- self.assignment_api.create_grant(group_id=group['id'],
- project_id=root_project['id'],
- role_id=self.role_member['id'])
- # Should get back both projects: because the direct role assignment for
- # the root project and inherited role assignment for leaf project
- user_projects = self.assignment_api.list_projects_for_user(user['id'])
- self.assertEqual(2, len(user_projects))
- self.assertIn(root_project, user_projects)
- self.assertIn(leaf_project, user_projects)
-
- # Disable OS-INHERIT extension
- self.config_fixture.config(group='os_inherit', enabled=False)
- # Should get back just root project - due the direct role assignment
- user_projects = self.assignment_api.list_projects_for_user(user['id'])
- self.assertEqual(1, len(user_projects))
- self.assertIn(root_project, user_projects)
-
- # TODO(henry-nash): The test above uses list_projects_for_user
- # which may, in a subsequent patch, be re-implemeted to call
- # list_role_assignments and then report only the distinct projects.
- #
- # The test plan below therefore mirrors this test, to ensure that
- # list_role_assignments works the same. Once list_projects_for_user
- # has been re-implemented then the manual tests above can be
- # refactored.
- test_plan = {
- # A domain with a project ans sub-project, plus a user.
- # Also, create 2 roles.
- 'entities': {
- 'domains': {'id': DEFAULT_DOMAIN_ID, 'users': 1, 'groups': 1,
- 'projects': {'project': 1}},
- 'roles': 2},
- 'group_memberships': [{'group': 0, 'users': [0]}],
- # A direct role and an inherited role on the parent
- 'assignments': [{'group': 0, 'role': 0, 'project': 0},
- {'group': 0, 'role': 1, 'project': 0,
- 'inherited_to_projects': True}],
- 'tests': [
- # List all effective assignments for user[0] - should get back
- # one direct role plus one inherited role.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0,
- 'indirect': {'group': 0}},
- {'user': 0, 'role': 1, 'project': 1,
- 'indirect': {'group': 0, 'project': 0}}]}
- ]
- }
-
- test_plan_with_os_inherit_disabled = {
- 'tests': [
- # List all effective assignments for user[0] - should only get
- # back the one direct role.
- {'params': {'user': 0, 'effective': True},
- 'results': [{'user': 0, 'role': 0, 'project': 0,
- 'indirect': {'group': 0}}]}
- ]
- }
- self.config_fixture.config(group='os_inherit', enabled=True)
- test_data = self.execute_assignment_test_plan(test_plan)
- self.config_fixture.config(group='os_inherit', enabled=False)
- # Pass the existing test data in to allow execution of 2nd test plan
- self.execute_assignment_tests(
- test_plan_with_os_inherit_disabled, test_data)
-
-
-class FilterTests(filtering.FilterTests):
- def test_list_entities_filtered(self):
- for entity in ['user', 'group', 'project']:
- # Create 20 entities
- entity_list = self._create_test_data(entity, 20)
-
- # Try filtering to get one an exact item out of the list
- hints = driver_hints.Hints()
- hints.add_filter('name', entity_list[10]['name'])
- entities = self._list_entities(entity)(hints=hints)
- self.assertEqual(1, len(entities))
- self.assertEqual(entities[0]['id'], entity_list[10]['id'])
- # Check the driver has removed the filter from the list hints
- self.assertFalse(hints.get_exact_filter_by_name('name'))
- self._delete_test_data(entity, entity_list)
-
- def test_list_users_inexact_filtered(self):
- # Create 20 users, some with specific names. We set the names at create
- # time (rather than updating them), since the LDAP driver does not
- # support name updates.
- user_name_data = {
- # user index: name for user
- 5: 'The',
- 6: 'The Ministry',
- 7: 'The Ministry of',
- 8: 'The Ministry of Silly',
- 9: 'The Ministry of Silly Walks',
- # ...and one for useful case insensitivity testing
- 10: 'The ministry of silly walks OF'
- }
- user_list = self._create_test_data(
- 'user', 20, domain_id=DEFAULT_DOMAIN_ID, name_dict=user_name_data)
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'ministry', comparator='contains')
- users = self.identity_api.list_users(hints=hints)
- self.assertEqual(5, len(users))
- self._match_with_list(users, user_list,
- list_start=6, list_end=11)
- # TODO(henry-nash) Check inexact filter has been removed.
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'The', comparator='startswith')
- users = self.identity_api.list_users(hints=hints)
- self.assertEqual(6, len(users))
- self._match_with_list(users, user_list,
- list_start=5, list_end=11)
- # TODO(henry-nash) Check inexact filter has been removed.
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'of', comparator='endswith')
- users = self.identity_api.list_users(hints=hints)
- self.assertEqual(2, len(users))
- # We can't assume we will get back the users in any particular order
- self.assertIn(user_list[7]['id'], [users[0]['id'], users[1]['id']])
- self.assertIn(user_list[10]['id'], [users[0]['id'], users[1]['id']])
- # TODO(henry-nash) Check inexact filter has been removed.
-
- # TODO(henry-nash): Add some case sensitive tests. However,
- # these would be hard to validate currently, since:
- #
- # For SQL, the issue is that MySQL 0.7, by default, is installed in
- # case insensitive mode (which is what is run by default for our
- # SQL backend tests). For production deployments. OpenStack
- # assumes a case sensitive database. For these tests, therefore, we
- # need to be able to check the sensitivity of the database so as to
- # know whether to run case sensitive tests here.
- #
- # For LDAP/AD, although dependent on the schema being used, attributes
- # are typically configured to be case aware, but not case sensitive.
-
- self._delete_test_data('user', user_list)
-
- def test_groups_for_user_filtered(self):
- """Test use of filtering doesn't break groups_for_user listing.
-
- Some backends may use filtering to achieve the list of groups for a
- user, so test that it can combine a second filter.
-
- Test Plan:
-
- - Create 10 groups, some with names we can filter on
- - Create 2 users
- - Assign 1 of those users to most of the groups, including some of the
- well known named ones
- - Assign the other user to other groups as spoilers
- - Ensure that when we list groups for users with a filter on the group
- name, both restrictions have been enforced on what is returned.
-
- """
-
- number_of_groups = 10
- group_name_data = {
- # entity index: name for entity
- 5: 'The',
- 6: 'The Ministry',
- 9: 'The Ministry of Silly Walks',
- }
- group_list = self._create_test_data(
- 'group', number_of_groups,
- domain_id=DEFAULT_DOMAIN_ID, name_dict=group_name_data)
- user_list = self._create_test_data('user', 2)
-
- for group in range(7):
- # Create membership, including with two out of the three groups
- # with well know names
- self.identity_api.add_user_to_group(user_list[0]['id'],
- group_list[group]['id'])
- # ...and some spoiler memberships
- for group in range(7, number_of_groups):
- self.identity_api.add_user_to_group(user_list[1]['id'],
- group_list[group]['id'])
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'The', comparator='startswith')
- groups = self.identity_api.list_groups_for_user(
- user_list[0]['id'], hints=hints)
- # We should only get back 2 out of the 3 groups that start with 'The'
- # hence showing that both "filters" have been applied
- self.assertThat(len(groups), matchers.Equals(2))
- self.assertIn(group_list[5]['id'], [groups[0]['id'], groups[1]['id']])
- self.assertIn(group_list[6]['id'], [groups[0]['id'], groups[1]['id']])
- self._delete_test_data('user', user_list)
- self._delete_test_data('group', group_list)
-
- def _get_user_name_field_size(self):
- """Return the size of the user name field for the backend.
-
- Subclasses can override this method to indicate that the user name
- field is limited in length. The user name is the field used in the test
- that validates that a filter value works even if it's longer than a
- field.
-
- If the backend doesn't limit the value length then return None.
-
- """
- return None
-
- def test_filter_value_wider_than_field(self):
- # If a filter value is given that's larger than the field in the
- # backend then no values are returned.
-
- user_name_field_size = self._get_user_name_field_size()
-
- if user_name_field_size is None:
- # The backend doesn't limit the size of the user name, so pass this
- # test.
- return
-
- # Create some users just to make sure would return something if the
- # filter was ignored.
- self._create_test_data('user', 2)
-
- hints = driver_hints.Hints()
- value = 'A' * (user_name_field_size + 1)
- hints.add_filter('name', value)
- users = self.identity_api.list_users(hints=hints)
- self.assertEqual([], users)
-
- def test_list_users_in_group_filtered(self):
- number_of_users = 10
- user_name_data = {
- 1: 'Arthur Conan Doyle',
- 3: 'Arthur Rimbaud',
- 9: 'Arthur Schopenhauer',
- }
- user_list = self._create_test_data(
- 'user', number_of_users,
- domain_id=DEFAULT_DOMAIN_ID, name_dict=user_name_data)
- group = self._create_one_entity('group',
- DEFAULT_DOMAIN_ID, 'Great Writers')
- for i in range(7):
- self.identity_api.add_user_to_group(user_list[i]['id'],
- group['id'])
-
- hints = driver_hints.Hints()
- hints.add_filter('name', 'Arthur', comparator='startswith')
- users = self.identity_api.list_users_in_group(group['id'], hints=hints)
- self.assertThat(len(users), matchers.Equals(2))
- self.assertIn(user_list[1]['id'], [users[0]['id'], users[1]['id']])
- self.assertIn(user_list[3]['id'], [users[0]['id'], users[1]['id']])
- self._delete_test_data('user', user_list)
- self._delete_entity('group')(group['id'])
-
-
-class LimitTests(filtering.FilterTests):
- ENTITIES = ['user', 'group', 'project']
-
- def setUp(self):
- """Setup for Limit Test Cases."""
-
- self.domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
- self.resource_api.create_domain(self.domain1['id'], self.domain1)
- self.addCleanup(self.clean_up_domain)
-
- self.entity_lists = {}
- self.domain1_entity_lists = {}
-
- for entity in self.ENTITIES:
- # Create 20 entities, 14 of which are in domain1
- self.entity_lists[entity] = self._create_test_data(entity, 6)
- self.domain1_entity_lists[entity] = self._create_test_data(
- entity, 14, self.domain1['id'])
- self.addCleanup(self.clean_up_entities)
-
- def clean_up_domain(self):
- """Clean up domain test data from Limit Test Cases."""
-
- self.domain1['enabled'] = False
- self.resource_api.update_domain(self.domain1['id'], self.domain1)
- self.resource_api.delete_domain(self.domain1['id'])
- del self.domain1
-
- def clean_up_entities(self):
- """Clean up entity test data from Limit Test Cases."""
- for entity in self.ENTITIES:
- self._delete_test_data(entity, self.entity_lists[entity])
- self._delete_test_data(entity, self.domain1_entity_lists[entity])
- del self.entity_lists
- del self.domain1_entity_lists
-
- def _test_list_entity_filtered_and_limited(self, entity):
- self.config_fixture.config(list_limit=10)
- # Should get back just 10 entities in domain1
- hints = driver_hints.Hints()
- hints.add_filter('domain_id', self.domain1['id'])
- entities = self._list_entities(entity)(hints=hints)
- self.assertEqual(hints.limit['limit'], len(entities))
- self.assertTrue(hints.limit['truncated'])
- self._match_with_list(entities, self.domain1_entity_lists[entity])
-
- # Override with driver specific limit
- if entity == 'project':
- self.config_fixture.config(group='resource', list_limit=5)
- else:
- self.config_fixture.config(group='identity', list_limit=5)
-
- # Should get back just 5 users in domain1
- hints = driver_hints.Hints()
- hints.add_filter('domain_id', self.domain1['id'])
- entities = self._list_entities(entity)(hints=hints)
- self.assertEqual(hints.limit['limit'], len(entities))
- self._match_with_list(entities, self.domain1_entity_lists[entity])
-
- # Finally, let's pretend we want to get the full list of entities,
- # even with the limits set, as part of some internal calculation.
- # Calling the API without a hints list should achieve this, and
- # return at least the 20 entries we created (there may be other
- # entities lying around created by other tests/setup).
- entities = self._list_entities(entity)()
- self.assertTrue(len(entities) >= 20)
-
- def test_list_users_filtered_and_limited(self):
- self._test_list_entity_filtered_and_limited('user')
-
- def test_list_groups_filtered_and_limited(self):
- self._test_list_entity_filtered_and_limited('group')
-
- def test_list_projects_filtered_and_limited(self):
- self._test_list_entity_filtered_and_limited('project')
diff --git a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
deleted file mode 100644
index f72cad63..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
+++ /dev/null
@@ -1,249 +0,0 @@
-# Copyright 2014 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from six.moves import range
-from testtools import matchers
-
-from keystone import exception
-from keystone.tests import unit
-
-
-class PolicyAssociationTests(object):
-
- def _assert_correct_policy(self, endpoint, policy):
- ref = (
- self.endpoint_policy_api.get_policy_for_endpoint(endpoint['id']))
- self.assertEqual(policy['id'], ref['id'])
-
- def _assert_correct_endpoints(self, policy, endpoint_list):
- endpoint_id_list = [ep['id'] for ep in endpoint_list]
- endpoints = (
- self.endpoint_policy_api.list_endpoints_for_policy(policy['id']))
- self.assertThat(endpoints, matchers.HasLength(len(endpoint_list)))
- for endpoint in endpoints:
- self.assertIn(endpoint['id'], endpoint_id_list)
-
- def load_sample_data(self):
- """Create sample data to test policy associations.
-
- The following data is created:
-
- - 3 regions, in a hierarchy, 0 -> 1 -> 2 (where 0 is top)
- - 3 services
- - 6 endpoints, 2 in each region, with a mixture of services:
- 0 - region 0, Service 0
- 1 - region 0, Service 1
- 2 - region 1, Service 1
- 3 - region 1, Service 2
- 4 - region 2, Service 2
- 5 - region 2, Service 0
-
- """
- def new_endpoint(region_id, service_id):
- endpoint = unit.new_endpoint_ref(interface='test',
- region_id=region_id,
- service_id=service_id,
- url='/url')
- self.endpoint.append(self.catalog_api.create_endpoint(
- endpoint['id'], endpoint))
-
- self.policy = []
- self.endpoint = []
- self.service = []
- self.region = []
-
- parent_region_id = None
- for i in range(3):
- policy = unit.new_policy_ref()
- self.policy.append(self.policy_api.create_policy(policy['id'],
- policy))
- service = unit.new_service_ref()
- self.service.append(self.catalog_api.create_service(service['id'],
- service))
- region = unit.new_region_ref(parent_region_id=parent_region_id)
- # Link the regions together as a hierarchy, [0] at the top
- parent_region_id = region['id']
- self.region.append(self.catalog_api.create_region(region))
-
- new_endpoint(self.region[0]['id'], self.service[0]['id'])
- new_endpoint(self.region[0]['id'], self.service[1]['id'])
- new_endpoint(self.region[1]['id'], self.service[1]['id'])
- new_endpoint(self.region[1]['id'], self.service[2]['id'])
- new_endpoint(self.region[2]['id'], self.service[2]['id'])
- new_endpoint(self.region[2]['id'], self.service[0]['id'])
-
- def test_policy_to_endpoint_association_crud(self):
- self.endpoint_policy_api.create_policy_association(
- self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
- self.endpoint_policy_api.check_policy_association(
- self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
- self.endpoint_policy_api.delete_policy_association(
- self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
- self.assertRaises(exception.NotFound,
- self.endpoint_policy_api.check_policy_association,
- self.policy[0]['id'],
- endpoint_id=self.endpoint[0]['id'])
-
- def test_overwriting_policy_to_endpoint_association(self):
- self.endpoint_policy_api.create_policy_association(
- self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
- self.endpoint_policy_api.create_policy_association(
- self.policy[1]['id'], endpoint_id=self.endpoint[0]['id'])
- self.assertRaises(exception.NotFound,
- self.endpoint_policy_api.check_policy_association,
- self.policy[0]['id'],
- endpoint_id=self.endpoint[0]['id'])
- self.endpoint_policy_api.check_policy_association(
- self.policy[1]['id'], endpoint_id=self.endpoint[0]['id'])
-
- def test_invalid_policy_to_endpoint_association(self):
- self.assertRaises(exception.InvalidPolicyAssociation,
- self.endpoint_policy_api.create_policy_association,
- self.policy[0]['id'])
- self.assertRaises(exception.InvalidPolicyAssociation,
- self.endpoint_policy_api.create_policy_association,
- self.policy[0]['id'],
- endpoint_id=self.endpoint[0]['id'],
- region_id=self.region[0]['id'])
- self.assertRaises(exception.InvalidPolicyAssociation,
- self.endpoint_policy_api.create_policy_association,
- self.policy[0]['id'],
- endpoint_id=self.endpoint[0]['id'],
- service_id=self.service[0]['id'])
- self.assertRaises(exception.InvalidPolicyAssociation,
- self.endpoint_policy_api.create_policy_association,
- self.policy[0]['id'],
- region_id=self.region[0]['id'])
-
- def test_policy_to_explicit_endpoint_association(self):
- # Associate policy 0 with endpoint 0
- self.endpoint_policy_api.create_policy_association(
- self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
- self._assert_correct_policy(self.endpoint[0], self.policy[0])
- self._assert_correct_endpoints(self.policy[0], [self.endpoint[0]])
- self.assertRaises(exception.NotFound,
- self.endpoint_policy_api.get_policy_for_endpoint,
- uuid.uuid4().hex)
-
- def test_policy_to_service_association(self):
- self.endpoint_policy_api.create_policy_association(
- self.policy[0]['id'], service_id=self.service[0]['id'])
- self.endpoint_policy_api.create_policy_association(
- self.policy[1]['id'], service_id=self.service[1]['id'])
-
- # Endpoints 0 and 5 are part of service 0
- self._assert_correct_policy(self.endpoint[0], self.policy[0])
- self._assert_correct_policy(self.endpoint[5], self.policy[0])
- self._assert_correct_endpoints(
- self.policy[0], [self.endpoint[0], self.endpoint[5]])
-
- # Endpoints 1 and 2 are part of service 1
- self._assert_correct_policy(self.endpoint[1], self.policy[1])
- self._assert_correct_policy(self.endpoint[2], self.policy[1])
- self._assert_correct_endpoints(
- self.policy[1], [self.endpoint[1], self.endpoint[2]])
-
- def test_policy_to_region_and_service_association(self):
- self.endpoint_policy_api.create_policy_association(
- self.policy[0]['id'], service_id=self.service[0]['id'],
- region_id=self.region[0]['id'])
- self.endpoint_policy_api.create_policy_association(
- self.policy[1]['id'], service_id=self.service[1]['id'],
- region_id=self.region[1]['id'])
- self.endpoint_policy_api.create_policy_association(
- self.policy[2]['id'], service_id=self.service[2]['id'],
- region_id=self.region[2]['id'])
-
- # Endpoint 0 is in region 0 with service 0, so should get policy 0
- self._assert_correct_policy(self.endpoint[0], self.policy[0])
- # Endpoint 5 is in Region 2 with service 0, so should also get
- # policy 0 by searching up the tree to Region 0
- self._assert_correct_policy(self.endpoint[5], self.policy[0])
-
- # Looking the other way round, policy 2 should only be in use by
- # endpoint 4, since that's the only endpoint in region 2 with the
- # correct service
- self._assert_correct_endpoints(
- self.policy[2], [self.endpoint[4]])
- # Policy 1 should only be in use by endpoint 2, since that's the only
- # endpoint in region 1 (and region 2 below it) with the correct service
- self._assert_correct_endpoints(
- self.policy[1], [self.endpoint[2]])
- # Policy 0 should be in use by endpoint 0, as well as 5 (since 5 is
- # of the correct service and in region 2 below it)
- self._assert_correct_endpoints(
- self.policy[0], [self.endpoint[0], self.endpoint[5]])
-
- def test_delete_association_by_entity(self):
- self.endpoint_policy_api.create_policy_association(
- self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'])
- self.endpoint_policy_api.delete_association_by_endpoint(
- self.endpoint[0]['id'])
- self.assertRaises(exception.NotFound,
- self.endpoint_policy_api.check_policy_association,
- self.policy[0]['id'],
- endpoint_id=self.endpoint[0]['id'])
- # Make sure deleting it again is silent - since this method is used
- # in response to notifications by the controller.
- self.endpoint_policy_api.delete_association_by_endpoint(
- self.endpoint[0]['id'])
-
- # Now try with service - ensure both combined region & service
- # associations and explicit service ones are removed
- self.endpoint_policy_api.create_policy_association(
- self.policy[0]['id'], service_id=self.service[0]['id'],
- region_id=self.region[0]['id'])
- self.endpoint_policy_api.create_policy_association(
- self.policy[1]['id'], service_id=self.service[0]['id'],
- region_id=self.region[1]['id'])
- self.endpoint_policy_api.create_policy_association(
- self.policy[0]['id'], service_id=self.service[0]['id'])
-
- self.endpoint_policy_api.delete_association_by_service(
- self.service[0]['id'])
-
- self.assertRaises(exception.NotFound,
- self.endpoint_policy_api.check_policy_association,
- self.policy[0]['id'],
- service_id=self.service[0]['id'],
- region_id=self.region[0]['id'])
- self.assertRaises(exception.NotFound,
- self.endpoint_policy_api.check_policy_association,
- self.policy[1]['id'],
- service_id=self.service[0]['id'],
- region_id=self.region[1]['id'])
- self.assertRaises(exception.NotFound,
- self.endpoint_policy_api.check_policy_association,
- self.policy[0]['id'],
- service_id=self.service[0]['id'])
-
- # Finally, check delete by region
- self.endpoint_policy_api.create_policy_association(
- self.policy[0]['id'], service_id=self.service[0]['id'],
- region_id=self.region[0]['id'])
-
- self.endpoint_policy_api.delete_association_by_region(
- self.region[0]['id'])
-
- self.assertRaises(exception.NotFound,
- self.endpoint_policy_api.check_policy_association,
- self.policy[0]['id'],
- service_id=self.service[0]['id'],
- region_id=self.region[0]['id'])
- self.assertRaises(exception.NotFound,
- self.endpoint_policy_api.check_policy_association,
- self.policy[0]['id'],
- service_id=self.service[0]['id'])
diff --git a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py
deleted file mode 100644
index 134a03f0..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2014 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.common import sql
-from keystone.tests.unit import test_backend_endpoint_policy
-from keystone.tests.unit import test_backend_sql
-
-
-class SqlPolicyAssociationTable(test_backend_sql.SqlModels):
- """Set of tests for checking SQL Policy Association Mapping."""
-
- def test_policy_association_mapping(self):
- cols = (('id', sql.String, 64),
- ('policy_id', sql.String, 64),
- ('endpoint_id', sql.String, 64),
- ('service_id', sql.String, 64),
- ('region_id', sql.String, 64))
- self.assertExpectedSchema('policy_association', cols)
-
-
-class SqlPolicyAssociationTests(
- test_backend_sql.SqlTests,
- test_backend_endpoint_policy.PolicyAssociationTests):
-
- def load_fixtures(self, fixtures):
- super(SqlPolicyAssociationTests, self).load_fixtures(fixtures)
- self.load_sample_data()
diff --git a/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py b/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py
deleted file mode 100644
index 995c564d..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.common import sql
-from keystone.tests.unit import test_backend_sql
-
-
-class SqlFederation(test_backend_sql.SqlModels):
- """Set of tests for checking SQL Federation."""
-
- def test_identity_provider(self):
- cols = (('id', sql.String, 64),
- ('enabled', sql.Boolean, None),
- ('description', sql.Text, None))
- self.assertExpectedSchema('identity_provider', cols)
-
- def test_idp_remote_ids(self):
- cols = (('idp_id', sql.String, 64),
- ('remote_id', sql.String, 255))
- self.assertExpectedSchema('idp_remote_ids', cols)
-
- def test_federated_protocol(self):
- cols = (('id', sql.String, 64),
- ('idp_id', sql.String, 64),
- ('mapping_id', sql.String, 64))
- self.assertExpectedSchema('federation_protocol', cols)
-
- def test_mapping(self):
- cols = (('id', sql.String, 64),
- ('rules', sql.JsonBlob, None))
- self.assertExpectedSchema('mapping', cols)
-
- def test_service_provider(self):
- cols = (('auth_url', sql.String, 256),
- ('id', sql.String, 64),
- ('enabled', sql.Boolean, None),
- ('description', sql.Text, None),
- ('relay_state_prefix', sql.String, 256),
- ('sp_url', sql.String, 256))
- self.assertExpectedSchema('service_provider', cols)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py b/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py
deleted file mode 100644
index e6635e18..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py
+++ /dev/null
@@ -1,198 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from testtools import matchers
-
-from keystone.common import sql
-from keystone.identity.mapping_backends import mapping
-from keystone.tests import unit
-from keystone.tests.unit import identity_mapping as mapping_sql
-from keystone.tests.unit import test_backend_sql
-
-
-class SqlIDMappingTable(test_backend_sql.SqlModels):
- """Set of tests for checking SQL Identity ID Mapping."""
-
- def test_id_mapping(self):
- cols = (('public_id', sql.String, 64),
- ('domain_id', sql.String, 64),
- ('local_id', sql.String, 64),
- ('entity_type', sql.Enum, None))
- self.assertExpectedSchema('id_mapping', cols)
-
-
-class SqlIDMapping(test_backend_sql.SqlTests):
-
- def setUp(self):
- super(SqlIDMapping, self).setUp()
- self.load_sample_data()
-
- def load_sample_data(self):
- self.addCleanup(self.clean_sample_data)
- domainA = unit.new_domain_ref()
- self.domainA = self.resource_api.create_domain(domainA['id'], domainA)
- domainB = unit.new_domain_ref()
- self.domainB = self.resource_api.create_domain(domainB['id'], domainB)
-
- def clean_sample_data(self):
- if hasattr(self, 'domainA'):
- self.domainA['enabled'] = False
- self.resource_api.update_domain(self.domainA['id'], self.domainA)
- self.resource_api.delete_domain(self.domainA['id'])
- if hasattr(self, 'domainB'):
- self.domainB['enabled'] = False
- self.resource_api.update_domain(self.domainB['id'], self.domainB)
- self.resource_api.delete_domain(self.domainB['id'])
-
- def test_invalid_public_key(self):
- self.assertIsNone(self.id_mapping_api.get_id_mapping(uuid.uuid4().hex))
-
- def test_id_mapping_crud(self):
- initial_mappings = len(mapping_sql.list_id_mappings())
- local_id1 = uuid.uuid4().hex
- local_id2 = uuid.uuid4().hex
- local_entity1 = {'domain_id': self.domainA['id'],
- 'local_id': local_id1,
- 'entity_type': mapping.EntityType.USER}
- local_entity2 = {'domain_id': self.domainB['id'],
- 'local_id': local_id2,
- 'entity_type': mapping.EntityType.GROUP}
-
- # Check no mappings for the new local entities
- self.assertIsNone(self.id_mapping_api.get_public_id(local_entity1))
- self.assertIsNone(self.id_mapping_api.get_public_id(local_entity2))
-
- # Create the new mappings and then read them back
- public_id1 = self.id_mapping_api.create_id_mapping(local_entity1)
- public_id2 = self.id_mapping_api.create_id_mapping(local_entity2)
- self.assertThat(mapping_sql.list_id_mappings(),
- matchers.HasLength(initial_mappings + 2))
- self.assertEqual(
- public_id1, self.id_mapping_api.get_public_id(local_entity1))
- self.assertEqual(
- public_id2, self.id_mapping_api.get_public_id(local_entity2))
-
- local_id_ref = self.id_mapping_api.get_id_mapping(public_id1)
- self.assertEqual(self.domainA['id'], local_id_ref['domain_id'])
- self.assertEqual(local_id1, local_id_ref['local_id'])
- self.assertEqual(mapping.EntityType.USER, local_id_ref['entity_type'])
- # Check we have really created a new external ID
- self.assertNotEqual(local_id1, public_id1)
-
- local_id_ref = self.id_mapping_api.get_id_mapping(public_id2)
- self.assertEqual(self.domainB['id'], local_id_ref['domain_id'])
- self.assertEqual(local_id2, local_id_ref['local_id'])
- self.assertEqual(mapping.EntityType.GROUP, local_id_ref['entity_type'])
- # Check we have really created a new external ID
- self.assertNotEqual(local_id2, public_id2)
-
- # Create another mappings, this time specifying a public ID to use
- new_public_id = uuid.uuid4().hex
- public_id3 = self.id_mapping_api.create_id_mapping(
- {'domain_id': self.domainB['id'], 'local_id': local_id2,
- 'entity_type': mapping.EntityType.USER},
- public_id=new_public_id)
- self.assertEqual(new_public_id, public_id3)
- self.assertThat(mapping_sql.list_id_mappings(),
- matchers.HasLength(initial_mappings + 3))
-
- # Delete the mappings we created, and make sure the mapping count
- # goes back to where it was
- self.id_mapping_api.delete_id_mapping(public_id1)
- self.id_mapping_api.delete_id_mapping(public_id2)
- self.id_mapping_api.delete_id_mapping(public_id3)
- self.assertThat(mapping_sql.list_id_mappings(),
- matchers.HasLength(initial_mappings))
-
- def test_id_mapping_handles_unicode(self):
- initial_mappings = len(mapping_sql.list_id_mappings())
- local_id = u'fäké1'
- local_entity = {'domain_id': self.domainA['id'],
- 'local_id': local_id,
- 'entity_type': mapping.EntityType.USER}
-
- # Check no mappings for the new local entity
- self.assertIsNone(self.id_mapping_api.get_public_id(local_entity))
-
- # Create the new mapping and then read it back
- public_id = self.id_mapping_api.create_id_mapping(local_entity)
- self.assertThat(mapping_sql.list_id_mappings(),
- matchers.HasLength(initial_mappings + 1))
- self.assertEqual(
- public_id, self.id_mapping_api.get_public_id(local_entity))
-
- def test_delete_public_id_is_silent(self):
- # Test that deleting an invalid public key is silent
- self.id_mapping_api.delete_id_mapping(uuid.uuid4().hex)
-
- def test_purge_mappings(self):
- initial_mappings = len(mapping_sql.list_id_mappings())
- local_id1 = uuid.uuid4().hex
- local_id2 = uuid.uuid4().hex
- local_id3 = uuid.uuid4().hex
- local_id4 = uuid.uuid4().hex
- local_id5 = uuid.uuid4().hex
-
- # Create five mappings,two in domainA, three in domainB
- self.id_mapping_api.create_id_mapping(
- {'domain_id': self.domainA['id'], 'local_id': local_id1,
- 'entity_type': mapping.EntityType.USER})
- self.id_mapping_api.create_id_mapping(
- {'domain_id': self.domainA['id'], 'local_id': local_id2,
- 'entity_type': mapping.EntityType.USER})
- public_id3 = self.id_mapping_api.create_id_mapping(
- {'domain_id': self.domainB['id'], 'local_id': local_id3,
- 'entity_type': mapping.EntityType.GROUP})
- public_id4 = self.id_mapping_api.create_id_mapping(
- {'domain_id': self.domainB['id'], 'local_id': local_id4,
- 'entity_type': mapping.EntityType.USER})
- public_id5 = self.id_mapping_api.create_id_mapping(
- {'domain_id': self.domainB['id'], 'local_id': local_id5,
- 'entity_type': mapping.EntityType.USER})
-
- self.assertThat(mapping_sql.list_id_mappings(),
- matchers.HasLength(initial_mappings + 5))
-
- # Purge mappings for domainA, should be left with those in B
- self.id_mapping_api.purge_mappings(
- {'domain_id': self.domainA['id']})
- self.assertThat(mapping_sql.list_id_mappings(),
- matchers.HasLength(initial_mappings + 3))
- self.id_mapping_api.get_id_mapping(public_id3)
- self.id_mapping_api.get_id_mapping(public_id4)
- self.id_mapping_api.get_id_mapping(public_id5)
-
- # Purge mappings for type Group, should purge one more
- self.id_mapping_api.purge_mappings(
- {'entity_type': mapping.EntityType.GROUP})
- self.assertThat(mapping_sql.list_id_mappings(),
- matchers.HasLength(initial_mappings + 2))
- self.id_mapping_api.get_id_mapping(public_id4)
- self.id_mapping_api.get_id_mapping(public_id5)
-
- # Purge mapping for a specific local identifier
- self.id_mapping_api.purge_mappings(
- {'domain_id': self.domainB['id'], 'local_id': local_id4,
- 'entity_type': mapping.EntityType.USER})
- self.assertThat(mapping_sql.list_id_mappings(),
- matchers.HasLength(initial_mappings + 1))
- self.id_mapping_api.get_id_mapping(public_id5)
-
- # Purge mappings the remaining mappings
- self.id_mapping_api.purge_mappings({})
- self.assertThat(mapping_sql.list_id_mappings(),
- matchers.HasLength(initial_mappings))
diff --git a/keystone-moon/keystone/tests/unit/test_backend_kvs.py b/keystone-moon/keystone/tests/unit/test_backend_kvs.py
deleted file mode 100644
index 36af1c36..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_kvs.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-import datetime
-import uuid
-
-from oslo_utils import timeutils
-import six
-
-from keystone.common import utils
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit.token import test_backends as token_tests
-
-
-class KvsToken(unit.TestCase, token_tests.TokenTests):
- def setUp(self):
- super(KvsToken, self).setUp()
- self.load_backends()
-
- def test_flush_expired_token(self):
- self.assertRaises(
- exception.NotImplemented,
- self.token_provider_api._persistence.flush_expired_tokens)
-
- def _update_user_token_index_direct(self, user_key, token_id, new_data):
- persistence = self.token_provider_api._persistence
- token_list = persistence.driver._get_user_token_list_with_expiry(
- user_key)
- # Update the user-index so that the expires time is _actually_ expired
- # since we do not do an explicit get on the token, we only reference
- # the data in the user index (to save extra round-trips to the kvs
- # backend).
- for i, data in enumerate(token_list):
- if data[0] == token_id:
- token_list[i] = new_data
- break
- self.token_provider_api._persistence.driver._store.set(user_key,
- token_list)
-
- def test_cleanup_user_index_on_create(self):
- user_id = six.text_type(uuid.uuid4().hex)
- valid_token_id, data = self.create_token_sample_data(user_id=user_id)
- expired_token_id, expired_data = self.create_token_sample_data(
- user_id=user_id)
-
- expire_delta = datetime.timedelta(seconds=86400)
-
- # NOTE(morganfainberg): Directly access the data cache since we need to
- # get expired tokens as well as valid tokens.
- token_persistence = self.token_provider_api._persistence
- user_key = token_persistence.driver._prefix_user_id(user_id)
- user_token_list = token_persistence.driver._store.get(user_key)
- valid_token_ref = token_persistence.get_token(valid_token_id)
- expired_token_ref = token_persistence.get_token(expired_token_id)
- expected_user_token_list = [
- (valid_token_id, utils.isotime(valid_token_ref['expires'],
- subsecond=True)),
- (expired_token_id, utils.isotime(expired_token_ref['expires'],
- subsecond=True))]
- self.assertEqual(expected_user_token_list, user_token_list)
- new_expired_data = (expired_token_id,
- utils.isotime(
- (timeutils.utcnow() - expire_delta),
- subsecond=True))
- self._update_user_token_index_direct(user_key, expired_token_id,
- new_expired_data)
- valid_token_id_2, valid_data_2 = self.create_token_sample_data(
- user_id=user_id)
- valid_token_ref_2 = token_persistence.get_token(valid_token_id_2)
- expected_user_token_list = [
- (valid_token_id, utils.isotime(valid_token_ref['expires'],
- subsecond=True)),
- (valid_token_id_2, utils.isotime(valid_token_ref_2['expires'],
- subsecond=True))]
- user_token_list = token_persistence.driver._store.get(user_key)
- self.assertEqual(expected_user_token_list, user_token_list)
-
- # Test that revoked tokens are removed from the list on create.
- token_persistence.delete_token(valid_token_id_2)
- new_token_id, data = self.create_token_sample_data(user_id=user_id)
- new_token_ref = token_persistence.get_token(new_token_id)
- expected_user_token_list = [
- (valid_token_id, utils.isotime(valid_token_ref['expires'],
- subsecond=True)),
- (new_token_id, utils.isotime(new_token_ref['expires'],
- subsecond=True))]
- user_token_list = token_persistence.driver._store.get(user_key)
- self.assertEqual(expected_user_token_list, user_token_list)
-
-
-class KvsTokenCacheInvalidation(unit.TestCase,
- token_tests.TokenCacheInvalidation):
- def setUp(self):
- super(KvsTokenCacheInvalidation, self).setUp()
- self.useFixture(database.Database(self.sql_driver_version_overrides))
- self.load_backends()
- self._create_test_data()
-
- def config_overrides(self):
- super(KvsTokenCacheInvalidation, self).config_overrides()
- self.config_fixture.config(group='token', driver='kvs')
diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap.py b/keystone-moon/keystone/tests/unit/test_backend_ldap.py
deleted file mode 100644
index cf618633..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_ldap.py
+++ /dev/null
@@ -1,3287 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2012 OpenStack Foundation
-# Copyright 2013 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-import ldap
-import mock
-from oslo_config import cfg
-from oslo_log import versionutils
-from oslotest import mockpatch
-import pkg_resources
-from six.moves import http_client
-from six.moves import range
-from testtools import matchers
-
-from keystone.common import cache
-from keystone.common import driver_hints
-from keystone.common import ldap as common_ldap
-from keystone.common.ldap import core as common_ldap_core
-from keystone import exception
-from keystone import identity
-from keystone.identity.mapping_backends import mapping as map
-from keystone import resource
-from keystone.tests import unit
-from keystone.tests.unit.assignment import test_backends as assignment_tests
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.identity import test_backends as identity_tests
-from keystone.tests.unit import identity_mapping as mapping_sql
-from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit.ksfixtures import ldapdb
-from keystone.tests.unit.resource import test_backends as resource_tests
-from keystone.tests.unit.utils import wip
-
-
-CONF = cfg.CONF
-
-
-def _assert_backends(testcase, **kwargs):
-
- def _get_backend_cls(testcase, subsystem):
- observed_backend = getattr(testcase, subsystem + '_api').driver
- return observed_backend.__class__
-
- def _get_domain_specific_backend_cls(manager, domain):
- observed_backend = manager.domain_configs.get_domain_driver(domain)
- return observed_backend.__class__
-
- def _get_entrypoint_cls(subsystem, name):
- entrypoint = entrypoint_map['keystone.' + subsystem][name]
- return entrypoint.resolve()
-
- def _load_domain_specific_configs(manager):
- if (not manager.domain_configs.configured and
- CONF.identity.domain_specific_drivers_enabled):
- manager.domain_configs.setup_domain_drivers(
- manager.driver, manager.resource_api)
-
- def _assert_equal(expected_cls, observed_cls, subsystem,
- domain=None):
- msg = ('subsystem %(subsystem)s expected %(expected_cls)r, '
- 'but observed %(observed_cls)r')
- if domain:
- subsystem = '%s[domain=%s]' % (subsystem, domain)
- assert expected_cls == observed_cls, msg % {
- 'expected_cls': expected_cls,
- 'observed_cls': observed_cls,
- 'subsystem': subsystem,
- }
-
- env = pkg_resources.Environment()
- keystone_dist = env['keystone'][0]
- entrypoint_map = pkg_resources.get_entry_map(keystone_dist)
-
- for subsystem, entrypoint_name in kwargs.items():
- if isinstance(entrypoint_name, str):
- observed_cls = _get_backend_cls(testcase, subsystem)
- expected_cls = _get_entrypoint_cls(subsystem, entrypoint_name)
- _assert_equal(expected_cls, observed_cls, subsystem)
-
- elif isinstance(entrypoint_name, dict):
- manager = getattr(testcase, subsystem + '_api')
- _load_domain_specific_configs(manager)
-
- for domain, entrypoint_name in entrypoint_name.items():
- if domain is None:
- observed_cls = _get_backend_cls(testcase, subsystem)
- expected_cls = _get_entrypoint_cls(
- subsystem, entrypoint_name)
- _assert_equal(expected_cls, observed_cls, subsystem)
- continue
-
- observed_cls = _get_domain_specific_backend_cls(
- manager, domain)
- expected_cls = _get_entrypoint_cls(subsystem, entrypoint_name)
- _assert_equal(expected_cls, observed_cls, subsystem, domain)
-
- else:
- raise ValueError('%r is not an expected value for entrypoint name'
- % entrypoint_name)
-
-
-def create_group_container(identity_api):
- # Create the groups base entry (ou=Groups,cn=example,cn=com)
- group_api = identity_api.driver.group
- conn = group_api.get_connection()
- dn = 'ou=Groups,cn=example,cn=com'
- conn.add_s(dn, [('objectclass', ['organizationalUnit']),
- ('ou', ['Groups'])])
-
-
-class BaseLDAPIdentity(identity_tests.IdentityTests,
- assignment_tests.AssignmentTests,
- resource_tests.ResourceTests):
-
- def setUp(self):
- super(BaseLDAPIdentity, self).setUp()
- self.ldapdb = self.useFixture(ldapdb.LDAPDatabase())
-
- self.load_backends()
- self.load_fixtures(default_fixtures)
- self.config_fixture.config(group='os_inherit', enabled=False)
-
- def _get_domain_fixture(self):
- """Domains in LDAP are read-only, so just return the static one."""
- return self.resource_api.get_domain(CONF.identity.default_domain_id)
-
- def get_config(self, domain_id):
- # Only one conf structure unless we are using separate domain backends
- return CONF
-
- def config_overrides(self):
- super(BaseLDAPIdentity, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
-
- def config_files(self):
- config_files = super(BaseLDAPIdentity, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
- return config_files
-
- def new_user_ref(self, domain_id, project_id=None, **kwargs):
- ref = unit.new_user_ref(domain_id=domain_id, project_id=project_id,
- **kwargs)
- if 'id' not in kwargs:
- del ref['id']
- return ref
-
- def get_user_enabled_vals(self, user):
- user_dn = (
- self.identity_api.driver.user._id_to_dn_string(user['id']))
- enabled_attr_name = CONF.ldap.user_enabled_attribute
-
- ldap_ = self.identity_api.driver.user.get_connection()
- res = ldap_.search_s(user_dn,
- ldap.SCOPE_BASE,
- u'(sn=%s)' % user['name'])
- if enabled_attr_name in res[0][1]:
- return res[0][1][enabled_attr_name]
- else:
- return None
-
- def test_build_tree(self):
- """Regression test for building the tree names."""
- user_api = identity.backends.ldap.UserApi(CONF)
- self.assertTrue(user_api)
- self.assertEqual("ou=Users,%s" % CONF.ldap.suffix, user_api.tree_dn)
-
- def test_configurable_allowed_user_actions(self):
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.identity_api.get_user(user['id'])
-
- user['password'] = u'fäképass2'
- self.identity_api.update_user(user['id'], user)
-
- self.identity_api.delete_user(user['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- user['id'])
-
- def test_configurable_forbidden_user_actions(self):
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.allow_create = False
- driver.user.allow_update = False
- driver.user.allow_delete = False
-
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ForbiddenAction,
- self.identity_api.create_user,
- user)
-
- self.user_foo['password'] = u'fäképass2'
- self.assertRaises(exception.ForbiddenAction,
- self.identity_api.update_user,
- self.user_foo['id'],
- self.user_foo)
-
- self.assertRaises(exception.ForbiddenAction,
- self.identity_api.delete_user,
- self.user_foo['id'])
-
- def test_configurable_forbidden_create_existing_user(self):
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.allow_create = False
-
- self.assertRaises(exception.ForbiddenAction,
- self.identity_api.create_user,
- self.user_foo)
-
- def test_user_filter(self):
- user_ref = self.identity_api.get_user(self.user_foo['id'])
- self.user_foo.pop('password')
- self.assertDictEqual(self.user_foo, user_ref)
-
- driver = self.identity_api._select_identity_driver(
- user_ref['domain_id'])
- driver.user.ldap_filter = '(CN=DOES_NOT_MATCH)'
- # invalidate the cache if the result is cached.
- self.identity_api.get_user.invalidate(self.identity_api,
- self.user_foo['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- self.user_foo['id'])
-
- def test_list_users_by_name_and_with_filter(self):
- # confirm that the user is not exposed when it does not match the
- # filter setting in conf even if it is requested by name in user list
- hints = driver_hints.Hints()
- hints.add_filter('name', self.user_foo['name'])
- domain_id = self.user_foo['domain_id']
- driver = self.identity_api._select_identity_driver(domain_id)
- driver.user.ldap_filter = ('(|(cn=%s)(cn=%s))' %
- (self.user_sna['id'], self.user_two['id']))
- users = self.identity_api.list_users(
- domain_scope=self._set_domain_scope(domain_id),
- hints=hints)
- self.assertEqual(0, len(users))
-
- def test_remove_role_grant_from_user_and_project(self):
- self.assignment_api.create_grant(user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'])
- self.assertDictEqual(self.role_member, roles_ref[0])
-
- self.assignment_api.delete_grant(user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- user_id=self.user_foo['id'],
- project_id=self.tenant_baz['id'],
- role_id='member')
-
- def test_get_and_remove_role_grant_by_group_and_project(self):
- new_domain = self._get_domain_fixture()
- new_group = unit.new_group_ref(domain_id=new_domain['id'])
- new_group = self.identity_api.create_group(new_group)
- new_user = self.new_user_ref(domain_id=new_domain['id'])
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
-
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- project_id=self.tenant_bar['id'])
- self.assertEqual([], roles_ref)
- self.assertEqual(0, len(roles_ref))
-
- self.assignment_api.create_grant(group_id=new_group['id'],
- project_id=self.tenant_bar['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- project_id=self.tenant_bar['id'])
- self.assertNotEmpty(roles_ref)
- self.assertDictEqual(self.role_member, roles_ref[0])
-
- self.assignment_api.delete_grant(group_id=new_group['id'],
- project_id=self.tenant_bar['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- project_id=self.tenant_bar['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.RoleAssignmentNotFound,
- self.assignment_api.delete_grant,
- group_id=new_group['id'],
- project_id=self.tenant_bar['id'],
- role_id='member')
-
- def test_get_and_remove_role_grant_by_group_and_domain(self):
- # TODO(henry-nash): We should really rewrite the tests in
- # unit.resource.test_backends to be more flexible as to where the
- # domains are sourced from, so that we would not need to override such
- # tests here. This is raised as bug 1373865.
- new_domain = self._get_domain_fixture()
- new_group = unit.new_group_ref(domain_id=new_domain['id'],)
- new_group = self.identity_api.create_group(new_group)
- new_user = self.new_user_ref(domain_id=new_domain['id'])
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
-
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
-
- self.assignment_api.create_grant(group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
-
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertDictEqual(self.role_member, roles_ref[0])
-
- self.assignment_api.delete_grant(group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.NotFound,
- self.assignment_api.delete_grant,
- group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
-
- def test_get_role_assignment_by_domain_not_found(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_del_role_assignment_by_domain_not_found(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_get_and_remove_role_grant_by_user_and_domain(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_get_and_remove_correct_role_grant_from_a_mix(self):
- self.skipTest('Blocked by bug 1101287')
-
- def test_get_and_remove_role_grant_by_group_and_cross_domain(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_get_and_remove_role_grant_by_user_and_cross_domain(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_role_grant_by_group_and_cross_domain_project(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_role_grant_by_user_and_cross_domain_project(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_multi_role_grant_by_user_group_on_project_domain(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_delete_role_with_user_and_group_grants(self):
- self.skipTest('Blocked by bug 1101287')
-
- def test_delete_user_with_group_project_domain_links(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_delete_group_with_user_project_domain_links(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_list_role_assignment_containing_names(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_list_projects_for_user(self):
- domain = self._get_domain_fixture()
- user1 = self.new_user_ref(domain_id=domain['id'])
- user1 = self.identity_api.create_user(user1)
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertThat(user_projects, matchers.HasLength(0))
-
- # new grant(user1, role_member, tenant_bar)
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- # new grant(user1, role_member, tenant_baz)
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_baz['id'],
- role_id=self.role_member['id'])
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertThat(user_projects, matchers.HasLength(2))
-
- # Now, check number of projects through groups
- user2 = self.new_user_ref(domain_id=domain['id'])
- user2 = self.identity_api.create_user(user2)
-
- group1 = unit.new_group_ref(domain_id=domain['id'])
- group1 = self.identity_api.create_group(group1)
-
- self.identity_api.add_user_to_group(user2['id'], group1['id'])
-
- # new grant(group1(user2), role_member, tenant_bar)
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- # new grant(group1(user2), role_member, tenant_baz)
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=self.tenant_baz['id'],
- role_id=self.role_member['id'])
- user_projects = self.assignment_api.list_projects_for_user(user2['id'])
- self.assertThat(user_projects, matchers.HasLength(2))
-
- # new grant(group1(user2), role_other, tenant_bar)
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_other['id'])
- user_projects = self.assignment_api.list_projects_for_user(user2['id'])
- self.assertThat(user_projects, matchers.HasLength(2))
-
- def test_list_projects_for_user_and_groups(self):
- domain = self._get_domain_fixture()
- # Create user1
- user1 = self.new_user_ref(domain_id=domain['id'])
- user1 = self.identity_api.create_user(user1)
-
- # Create new group for user1
- group1 = unit.new_group_ref(domain_id=domain['id'])
- group1 = self.identity_api.create_group(group1)
-
- # Add user1 to group1
- self.identity_api.add_user_to_group(user1['id'], group1['id'])
-
- # Now, add grant to user1 and group1 in tenant_bar
- self.assignment_api.create_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
-
- # The result is user1 has only one project granted
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertThat(user_projects, matchers.HasLength(1))
-
- # Now, delete user1 grant into tenant_bar and check
- self.assignment_api.delete_grant(user_id=user1['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
-
- # The result is user1 has only one project granted.
- # Granted through group1.
- user_projects = self.assignment_api.list_projects_for_user(user1['id'])
- self.assertThat(user_projects, matchers.HasLength(1))
-
- def test_list_projects_for_user_with_grants(self):
- domain = self._get_domain_fixture()
- new_user = self.new_user_ref(domain_id=domain['id'])
- new_user = self.identity_api.create_user(new_user)
-
- group1 = unit.new_group_ref(domain_id=domain['id'])
- group1 = self.identity_api.create_group(group1)
- group2 = unit.new_group_ref(domain_id=domain['id'])
- group2 = self.identity_api.create_group(group2)
-
- project1 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project2['id'], project2)
-
- self.identity_api.add_user_to_group(new_user['id'],
- group1['id'])
- self.identity_api.add_user_to_group(new_user['id'],
- group2['id'])
-
- self.assignment_api.create_grant(user_id=new_user['id'],
- project_id=self.tenant_bar['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(user_id=new_user['id'],
- project_id=project1['id'],
- role_id=self.role_admin['id'])
- self.assignment_api.create_grant(group_id=group2['id'],
- project_id=project2['id'],
- role_id=self.role_admin['id'])
-
- user_projects = self.assignment_api.list_projects_for_user(
- new_user['id'])
- self.assertEqual(3, len(user_projects))
-
- def test_create_duplicate_user_name_in_different_domains(self):
- self.skipTest('Domains are read-only against LDAP')
-
- def test_create_duplicate_project_name_in_different_domains(self):
- self.skipTest('Domains are read-only against LDAP')
-
- def test_create_duplicate_group_name_in_different_domains(self):
- self.skipTest(
- 'N/A: LDAP does not support multiple domains')
-
- def test_move_user_between_domains(self):
- self.skipTest('Domains are read-only against LDAP')
-
- def test_move_user_between_domains_with_clashing_names_fails(self):
- self.skipTest('Domains are read-only against LDAP')
-
- def test_move_group_between_domains(self):
- self.skipTest(
- 'N/A: LDAP does not support multiple domains')
-
- def test_move_group_between_domains_with_clashing_names_fails(self):
- self.skipTest('Domains are read-only against LDAP')
-
- def test_move_project_between_domains(self):
- self.skipTest('Domains are read-only against LDAP')
-
- def test_move_project_between_domains_with_clashing_names_fails(self):
- self.skipTest('Domains are read-only against LDAP')
-
- def test_get_roles_for_user_and_domain(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_get_roles_for_groups_on_domain(self):
- self.skipTest('Blocked by bug: 1390125')
-
- def test_get_roles_for_groups_on_project(self):
- self.skipTest('Blocked by bug: 1390125')
-
- def test_list_domains_for_groups(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
-
- def test_list_projects_for_groups(self):
- self.skipTest('Blocked by bug: 1390125')
-
- def test_domain_delete_hierarchy(self):
- self.skipTest('Domains are read-only against LDAP')
-
- def test_list_role_assignments_unfiltered(self):
- new_domain = self._get_domain_fixture()
- new_user = self.new_user_ref(domain_id=new_domain['id'])
- new_user = self.identity_api.create_user(new_user)
- new_group = unit.new_group_ref(domain_id=new_domain['id'])
- new_group = self.identity_api.create_group(new_group)
- new_project = unit.new_project_ref(domain_id=new_domain['id'])
- self.resource_api.create_project(new_project['id'], new_project)
-
- # First check how many role grant already exist
- existing_assignments = len(self.assignment_api.list_role_assignments())
-
- self.assignment_api.create_grant(user_id=new_user['id'],
- project_id=new_project['id'],
- role_id='other')
- self.assignment_api.create_grant(group_id=new_group['id'],
- project_id=new_project['id'],
- role_id='admin')
-
- # Read back the list of assignments - check it is gone up by 2
- after_assignments = len(self.assignment_api.list_role_assignments())
- self.assertEqual(existing_assignments + 2, after_assignments)
-
- def test_list_role_assignments_dumb_member(self):
- self.config_fixture.config(group='ldap', use_dumb_member=True)
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- new_domain = self._get_domain_fixture()
- new_user = self.new_user_ref(domain_id=new_domain['id'])
- new_user = self.identity_api.create_user(new_user)
- new_project = unit.new_project_ref(domain_id=new_domain['id'])
- self.resource_api.create_project(new_project['id'], new_project)
- self.assignment_api.create_grant(user_id=new_user['id'],
- project_id=new_project['id'],
- role_id='other')
-
- # Read back the list of assignments and ensure
- # that the LDAP dumb member isn't listed.
- assignment_ids = [a['user_id'] for a in
- self.assignment_api.list_role_assignments()]
- dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
- self.assertNotIn(dumb_id, assignment_ids)
-
- def test_list_user_ids_for_project_dumb_member(self):
- self.config_fixture.config(group='ldap', use_dumb_member=True)
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- user = self.identity_api.create_user(user)
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- user['id'])
- user_ids = self.assignment_api.list_user_ids_for_project(
- self.tenant_baz['id'])
-
- self.assertIn(user['id'], user_ids)
-
- dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
- self.assertNotIn(dumb_id, user_ids)
-
- def test_multi_group_grants_on_project_domain(self):
- self.skipTest('Blocked by bug 1101287')
-
- def test_list_group_members_missing_entry(self):
- """List group members with deleted user.
-
- If a group has a deleted entry for a member, the non-deleted members
- are returned.
-
- """
- # Create a group
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group_id = self.identity_api.create_group(group)['id']
-
- # Create a couple of users and add them to the group.
- user = dict(name=uuid.uuid4().hex,
- domain_id=CONF.identity.default_domain_id)
- user_1_id = self.identity_api.create_user(user)['id']
-
- self.identity_api.add_user_to_group(user_1_id, group_id)
-
- user = dict(name=uuid.uuid4().hex,
- domain_id=CONF.identity.default_domain_id)
- user_2_id = self.identity_api.create_user(user)['id']
-
- self.identity_api.add_user_to_group(user_2_id, group_id)
-
- # Delete user 2
- # NOTE(blk-u): need to go directly to user interface to keep from
- # updating the group.
- unused, driver, entity_id = (
- self.identity_api._get_domain_driver_and_entity_id(user_2_id))
- driver.user.delete(entity_id)
-
- # List group users and verify only user 1.
- res = self.identity_api.list_users_in_group(group_id)
-
- self.assertEqual(1, len(res), "Expected 1 entry (user_1)")
- self.assertEqual(user_1_id, res[0]['id'], "Expected user 1 id")
-
- def test_list_group_members_when_no_members(self):
- # List group members when there is no member in the group.
- # No exception should be raised.
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
-
- # If this doesn't raise, then the test is successful.
- self.identity_api.list_users_in_group(group['id'])
-
- def test_list_group_members_dumb_member(self):
- self.config_fixture.config(group='ldap', use_dumb_member=True)
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- # Create a group
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group_id = self.identity_api.create_group(group)['id']
-
- # Create a user
- user = dict(name=uuid.uuid4().hex,
- domain_id=CONF.identity.default_domain_id)
- user_id = self.identity_api.create_user(user)['id']
-
- # Add user to the group
- self.identity_api.add_user_to_group(user_id, group_id)
-
- user_ids = self.identity_api.list_users_in_group(group_id)
- dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
-
- self.assertNotIn(dumb_id, user_ids)
-
- def test_list_domains(self):
- # We have more domains here than the parent class, check for the
- # correct number of domains for the multildap backend configs
- domain1 = unit.new_domain_ref()
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- self.resource_api.create_domain(domain2['id'], domain2)
- domains = self.resource_api.list_domains()
- self.assertEqual(7, len(domains))
- domain_ids = []
- for domain in domains:
- domain_ids.append(domain.get('id'))
- self.assertIn(CONF.identity.default_domain_id, domain_ids)
- self.assertIn(domain1['id'], domain_ids)
- self.assertIn(domain2['id'], domain_ids)
-
- def test_authenticate_requires_simple_bind(self):
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.assignment_api.add_user_to_project(self.tenant_baz['id'],
- user['id'])
- driver = self.identity_api._select_identity_driver(
- user['domain_id'])
- driver.user.LDAP_USER = None
- driver.user.LDAP_PASSWORD = None
-
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=user['id'],
- password=None)
-
- # The group and domain CRUD tests below override the standard ones in
- # unit.identity.test_backends.py so that we can exclude the update name
- # test, since we do not (and will not) support the update of either group
- # or domain names with LDAP. In the tests below, the update is tested by
- # updating description.
- @mock.patch.object(versionutils, 'report_deprecated_feature')
- def test_group_crud(self, mock_deprecator):
- # NOTE(stevemar): As of the Mitaka release, we now check for calls that
- # the LDAP write functionality has been deprecated.
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("create_group for the LDAP identity backend", args[1])
-
- group_ref = self.identity_api.get_group(group['id'])
- self.assertDictEqual(group, group_ref)
- group['description'] = uuid.uuid4().hex
- self.identity_api.update_group(group['id'], group)
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("update_group for the LDAP identity backend", args[1])
-
- group_ref = self.identity_api.get_group(group['id'])
- self.assertDictEqual(group, group_ref)
-
- self.identity_api.delete_group(group['id'])
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("delete_group for the LDAP identity backend", args[1])
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.get_group,
- group['id'])
-
- @mock.patch.object(versionutils, 'report_deprecated_feature')
- def test_add_remove_user_group_deprecated(self, mock_deprecator):
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.identity_api.add_user_to_group(user['id'], group['id'])
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("add_user_to_group for the LDAP identity", args[1])
-
- self.identity_api.remove_user_from_group(user['id'], group['id'])
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("remove_user_from_group for the LDAP identity", args[1])
-
- @unit.skip_if_cache_disabled('identity')
- def test_cache_layer_group_crud(self):
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
- # cache the result
- group_ref = self.identity_api.get_group(group['id'])
- # delete the group bypassing identity api.
- domain_id, driver, entity_id = (
- self.identity_api._get_domain_driver_and_entity_id(group['id']))
- driver.delete_group(entity_id)
-
- self.assertEqual(group_ref,
- self.identity_api.get_group(group['id']))
- self.identity_api.get_group.invalidate(self.identity_api, group['id'])
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.get_group, group['id'])
-
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
- # cache the result
- self.identity_api.get_group(group['id'])
- group['description'] = uuid.uuid4().hex
- group_ref = self.identity_api.update_group(group['id'], group)
- self.assertDictContainsSubset(self.identity_api.get_group(group['id']),
- group_ref)
-
- def test_create_user_none_mapping(self):
- # When create a user where an attribute maps to None, the entry is
- # created without that attribute and it doesn't fail with a TypeError.
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.attribute_ignore = ['enabled', 'email',
- 'tenants', 'tenantId']
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id,
- project_id='maps_to_none')
-
- # If this doesn't raise, then the test is successful.
- user = self.identity_api.create_user(user)
-
- def test_create_user_with_boolean_string_names(self):
- # Ensure that any attribute that is equal to the string 'TRUE'
- # or 'FALSE' will not be converted to a boolean value, it
- # should be returned as is.
- boolean_strings = ['TRUE', 'FALSE', 'true', 'false', 'True', 'False',
- 'TrUe' 'FaLse']
- for name in boolean_strings:
- user = self.new_user_ref(name=name,
- domain_id=CONF.identity.default_domain_id)
- user_ref = self.identity_api.create_user(user)
- user_info = self.identity_api.get_user(user_ref['id'])
- self.assertEqual(name, user_info['name'])
- # Delete the user to ensure that the Keystone uniqueness
- # requirements combined with the case-insensitive nature of a
- # typical LDAP schema does not cause subsequent names in
- # boolean_strings to clash.
- self.identity_api.delete_user(user_ref['id'])
-
- def test_unignored_user_none_mapping(self):
- # Ensure that an attribute that maps to None that is not explicitly
- # ignored in configuration is implicitly ignored without triggering
- # an error.
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.attribute_ignore = ['enabled', 'email',
- 'tenants', 'tenantId']
-
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- user_ref = self.identity_api.create_user(user)
-
- # If this doesn't raise, then the test is successful.
- self.identity_api.get_user(user_ref['id'])
-
- def test_update_user_name(self):
- """A user's name cannot be changed through the LDAP driver."""
- self.assertRaises(exception.Conflict,
- super(BaseLDAPIdentity, self).test_update_user_name)
-
- def test_arbitrary_attributes_are_returned_from_get_user(self):
- self.skipTest("Using arbitrary attributes doesn't work under LDAP")
-
- def test_new_arbitrary_attributes_are_returned_from_update_user(self):
- self.skipTest("Using arbitrary attributes doesn't work under LDAP")
-
- def test_updated_arbitrary_attributes_are_returned_from_update_user(self):
- self.skipTest("Using arbitrary attributes doesn't work under LDAP")
-
- def test_cache_layer_domain_crud(self):
- # TODO(morganfainberg): This also needs to be removed when full LDAP
- # implementation is submitted. No need to duplicate the above test,
- # just skip this time.
- self.skipTest('Domains are read-only against LDAP')
-
- def test_user_id_comma(self):
- """Even if the user has a , in their ID, groups can be listed."""
- # Create a user with a , in their ID
- # NOTE(blk-u): the DN for this user is hard-coded in fakeldap!
-
- # Since we want to fake up this special ID, we'll squirt this
- # direct into the driver and bypass the manager layer.
- user_id = u'Doe, John'
- user = self.new_user_ref(id=user_id,
- domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.driver.create_user(user_id, user)
-
- # Now we'll use the manager to discover it, which will create a
- # Public ID for it.
- ref_list = self.identity_api.list_users()
- public_user_id = None
- for ref in ref_list:
- if ref['name'] == user['name']:
- public_user_id = ref['id']
- break
-
- # Create a group
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group_id = group['id']
- group = self.identity_api.driver.create_group(group_id, group)
- # Now we'll use the manager to discover it, which will create a
- # Public ID for it.
- ref_list = self.identity_api.list_groups()
- public_group_id = None
- for ref in ref_list:
- if ref['name'] == group['name']:
- public_group_id = ref['id']
- break
-
- # Put the user in the group
- self.identity_api.add_user_to_group(public_user_id, public_group_id)
-
- # List groups for user.
- ref_list = self.identity_api.list_groups_for_user(public_user_id)
-
- group['id'] = public_group_id
- self.assertThat(ref_list, matchers.Equals([group]))
-
- def test_user_id_comma_grants(self):
- """List user and group grants, even with a comma in the user's ID."""
- # Create a user with a , in their ID
- # NOTE(blk-u): the DN for this user is hard-coded in fakeldap!
-
- # Since we want to fake up this special ID, we'll squirt this
- # direct into the driver and bypass the manager layer
- user_id = u'Doe, John'
- user = self.new_user_ref(id=user_id,
- domain_id=CONF.identity.default_domain_id)
- self.identity_api.driver.create_user(user_id, user)
-
- # Now we'll use the manager to discover it, which will create a
- # Public ID for it.
- ref_list = self.identity_api.list_users()
- public_user_id = None
- for ref in ref_list:
- if ref['name'] == user['name']:
- public_user_id = ref['id']
- break
-
- # Grant the user a role on a project.
-
- role_id = 'member'
- project_id = self.tenant_baz['id']
-
- self.assignment_api.create_grant(role_id, user_id=public_user_id,
- project_id=project_id)
-
- role_ref = self.assignment_api.get_grant(role_id,
- user_id=public_user_id,
- project_id=project_id)
-
- self.assertEqual(role_id, role_ref['id'])
-
- def test_user_enabled_ignored_disable_error(self):
- # When the server is configured so that the enabled attribute is
- # ignored for users, users cannot be disabled.
-
- self.config_fixture.config(group='ldap',
- user_attribute_ignore=['enabled'])
-
- # Need to re-load backends for the config change to take effect.
- self.load_backends()
-
- # Attempt to disable the user.
- self.assertRaises(exception.ForbiddenAction,
- self.identity_api.update_user, self.user_foo['id'],
- {'enabled': False})
-
- user_info = self.identity_api.get_user(self.user_foo['id'])
-
- # If 'enabled' is ignored then 'enabled' isn't returned as part of the
- # ref.
- self.assertNotIn('enabled', user_info)
-
- def test_group_enabled_ignored_disable_error(self):
- # When the server is configured so that the enabled attribute is
- # ignored for groups, groups cannot be disabled.
-
- self.config_fixture.config(group='ldap',
- group_attribute_ignore=['enabled'])
-
- # Need to re-load backends for the config change to take effect.
- self.load_backends()
-
- # There's no group fixture so create a group.
- new_domain = self._get_domain_fixture()
- new_group = unit.new_group_ref(domain_id=new_domain['id'])
- new_group = self.identity_api.create_group(new_group)
-
- # Attempt to disable the group.
- self.assertRaises(exception.ForbiddenAction,
- self.identity_api.update_group, new_group['id'],
- {'enabled': False})
-
- group_info = self.identity_api.get_group(new_group['id'])
-
- # If 'enabled' is ignored then 'enabled' isn't returned as part of the
- # ref.
- self.assertNotIn('enabled', group_info)
-
- def test_project_enabled_ignored_disable_error(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_list_role_assignment_by_domain(self):
- """Multiple domain assignments are not supported."""
- self.assertRaises(
- (exception.Forbidden, exception.DomainNotFound,
- exception.ValidationError),
- super(BaseLDAPIdentity, self).test_list_role_assignment_by_domain)
-
- def test_list_role_assignment_by_user_with_domain_group_roles(self):
- """Multiple domain assignments are not supported."""
- self.assertRaises(
- (exception.Forbidden, exception.DomainNotFound,
- exception.ValidationError),
- super(BaseLDAPIdentity, self).
- test_list_role_assignment_by_user_with_domain_group_roles)
-
- def test_domain_crud(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_list_role_assignment_using_sourced_groups_with_domains(self):
- """Multiple domain assignments are not supported."""
- self.assertRaises(
- (exception.Forbidden, exception.ValidationError,
- exception.DomainNotFound),
- super(BaseLDAPIdentity, self).
- test_list_role_assignment_using_sourced_groups_with_domains)
-
- def test_create_project_with_domain_id_and_without_parent_id(self):
- """Multiple domains are not supported."""
- self.assertRaises(
- exception.ValidationError,
- super(BaseLDAPIdentity, self).
- test_create_project_with_domain_id_and_without_parent_id)
-
- def test_create_project_with_domain_id_mismatch_to_parent_domain(self):
- """Multiple domains are not supported."""
- self.assertRaises(
- exception.ValidationError,
- super(BaseLDAPIdentity, self).
- test_create_project_with_domain_id_mismatch_to_parent_domain)
-
- def test_remove_foreign_assignments_when_deleting_a_domain(self):
- """Multiple domains are not supported."""
- self.assertRaises(
- (exception.ValidationError, exception.DomainNotFound),
- super(BaseLDAPIdentity,
- self).test_remove_foreign_assignments_when_deleting_a_domain)
-
-
-class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
-
- def setUp(self):
- # NOTE(dstanek): The database must be setup prior to calling the
- # parent's setUp. The parent's setUp uses services (like
- # credentials) that require a database.
- self.useFixture(database.Database())
- super(LDAPIdentity, self).setUp()
- _assert_backends(self,
- assignment='sql',
- identity='ldap',
- resource='sql')
-
- def load_fixtures(self, fixtures):
- # Override super impl since need to create group container.
- create_group_container(self.identity_api)
- super(LDAPIdentity, self).load_fixtures(fixtures)
-
- def test_list_domains(self):
- domains = self.resource_api.list_domains()
- self.assertEqual([resource.calc_default_domain()], domains)
-
- def test_configurable_allowed_project_actions(self):
- domain = self._get_domain_fixture()
- project = unit.new_project_ref(domain_id=domain['id'])
- project = self.resource_api.create_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertEqual(project['id'], project_ref['id'])
-
- project['enabled'] = False
- self.resource_api.update_project(project['id'], project)
-
- self.resource_api.delete_project(project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
-
- def test_configurable_subtree_delete(self):
- self.config_fixture.config(group='ldap', allow_subtree_delete=True)
- self.load_backends()
-
- project1 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project1['id'], project1)
-
- role1 = unit.new_role_ref()
- self.role_api.create_role(role1['id'], role1)
-
- user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user1 = self.identity_api.create_user(user1)
-
- self.assignment_api.add_role_to_user_and_project(
- user_id=user1['id'],
- tenant_id=project1['id'],
- role_id=role1['id'])
-
- self.resource_api.delete_project(project1['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project1['id'])
-
- self.resource_api.create_project(project1['id'], project1)
-
- list = self.assignment_api.get_roles_for_user_and_project(
- user1['id'],
- project1['id'])
- self.assertEqual(0, len(list))
-
- def test_configurable_forbidden_project_actions(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_project_filter(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_dumb_member(self):
- self.config_fixture.config(group='ldap', use_dumb_member=True)
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
- dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- dumb_id)
-
- def test_project_attribute_mapping(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_project_attribute_ignore(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_user_enable_attribute_mask(self):
- self.config_fixture.config(group='ldap', user_enabled_mask=2,
- user_enabled_default='512')
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- user_ref = self.identity_api.create_user(user)
-
- # Use assertIs rather than assertTrue because assertIs will assert the
- # value is a Boolean as expected.
- self.assertIs(user_ref['enabled'], True)
- self.assertNotIn('enabled_nomask', user_ref)
-
- enabled_vals = self.get_user_enabled_vals(user_ref)
- self.assertEqual([512], enabled_vals)
-
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(user_ref['enabled'], True)
- self.assertNotIn('enabled_nomask', user_ref)
-
- user['enabled'] = False
- user_ref = self.identity_api.update_user(user_ref['id'], user)
- self.assertIs(user_ref['enabled'], False)
- self.assertNotIn('enabled_nomask', user_ref)
-
- enabled_vals = self.get_user_enabled_vals(user_ref)
- self.assertEqual([514], enabled_vals)
-
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(user_ref['enabled'], False)
- self.assertNotIn('enabled_nomask', user_ref)
-
- user['enabled'] = True
- user_ref = self.identity_api.update_user(user_ref['id'], user)
- self.assertIs(user_ref['enabled'], True)
- self.assertNotIn('enabled_nomask', user_ref)
-
- enabled_vals = self.get_user_enabled_vals(user_ref)
- self.assertEqual([512], enabled_vals)
-
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(user_ref['enabled'], True)
- self.assertNotIn('enabled_nomask', user_ref)
-
- def test_user_enabled_invert(self):
- self.config_fixture.config(group='ldap', user_enabled_invert=True,
- user_enabled_default=False)
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- user2 = self.new_user_ref(enabled=False,
- domain_id=CONF.identity.default_domain_id)
-
- user3 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- # Ensure that the LDAP attribute is False for a newly created
- # enabled user.
- user_ref = self.identity_api.create_user(user1)
- self.assertIs(True, user_ref['enabled'])
- enabled_vals = self.get_user_enabled_vals(user_ref)
- self.assertEqual([False], enabled_vals)
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(True, user_ref['enabled'])
-
- # Ensure that the LDAP attribute is True for a disabled user.
- user1['enabled'] = False
- user_ref = self.identity_api.update_user(user_ref['id'], user1)
- self.assertIs(False, user_ref['enabled'])
- enabled_vals = self.get_user_enabled_vals(user_ref)
- self.assertEqual([True], enabled_vals)
-
- # Enable the user and ensure that the LDAP attribute is True again.
- user1['enabled'] = True
- user_ref = self.identity_api.update_user(user_ref['id'], user1)
- self.assertIs(True, user_ref['enabled'])
- enabled_vals = self.get_user_enabled_vals(user_ref)
- self.assertEqual([False], enabled_vals)
-
- # Ensure that the LDAP attribute is True for a newly created
- # disabled user.
- user_ref = self.identity_api.create_user(user2)
- self.assertIs(False, user_ref['enabled'])
- enabled_vals = self.get_user_enabled_vals(user_ref)
- self.assertEqual([True], enabled_vals)
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(False, user_ref['enabled'])
-
- # Ensure that the LDAP attribute is inverted for a newly created
- # user when the user_enabled_default setting is used.
- user_ref = self.identity_api.create_user(user3)
- self.assertIs(True, user_ref['enabled'])
- enabled_vals = self.get_user_enabled_vals(user_ref)
- self.assertEqual([False], enabled_vals)
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(True, user_ref['enabled'])
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_user_enabled_invert_no_enabled_value(self, mock_ldap_get):
- self.config_fixture.config(group='ldap', user_enabled_invert=True,
- user_enabled_default=False)
- # Mock the search results to return an entry with
- # no enabled value.
- mock_ldap_get.return_value = (
- 'cn=junk,dc=example,dc=com',
- {
- 'sn': [uuid.uuid4().hex],
- 'email': [uuid.uuid4().hex],
- 'cn': ['junk']
- }
- )
-
- user_api = identity.backends.ldap.UserApi(CONF)
- user_ref = user_api.get('junk')
- # Ensure that the model enabled attribute is inverted
- # from the resource default.
- self.assertIs(not CONF.ldap.user_enabled_default, user_ref['enabled'])
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_user_enabled_invert_default_str_value(self, mock_ldap_get):
- self.config_fixture.config(group='ldap', user_enabled_invert=True,
- user_enabled_default='False')
- # Mock the search results to return an entry with
- # no enabled value.
- mock_ldap_get.return_value = (
- 'cn=junk,dc=example,dc=com',
- {
- 'sn': [uuid.uuid4().hex],
- 'email': [uuid.uuid4().hex],
- 'cn': ['junk']
- }
- )
-
- user_api = identity.backends.ldap.UserApi(CONF)
- user_ref = user_api.get('junk')
- # Ensure that the model enabled attribute is inverted
- # from the resource default.
- self.assertIs(True, user_ref['enabled'])
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_user_enabled_attribute_handles_expired(self, mock_ldap_get):
- # If using 'passwordisexpired' as enabled attribute, and inverting it,
- # Then an unauthorized user (expired password) should not be enabled.
- self.config_fixture.config(group='ldap', user_enabled_invert=True,
- user_enabled_attribute='passwordisexpired')
- mock_ldap_get.return_value = (
- u'uid=123456789,c=us,ou=our_ldap,o=acme.com',
- {
- 'uid': [123456789],
- 'mail': ['shaun@acme.com'],
- 'passwordisexpired': ['TRUE'],
- 'cn': ['uid=123456789,c=us,ou=our_ldap,o=acme.com']
- }
- )
-
- user_api = identity.backends.ldap.UserApi(CONF)
- user_ref = user_api.get('123456789')
- self.assertIs(False, user_ref['enabled'])
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_user_enabled_attribute_handles_utf8(self, mock_ldap_get):
- # If using 'passwordisexpired' as enabled attribute, and inverting it,
- # and the result is utf8 encoded, then an authorized user should
- # be enabled.
- self.config_fixture.config(group='ldap', user_enabled_invert=True,
- user_enabled_attribute='passwordisexpired')
- mock_ldap_get.return_value = (
- u'uid=123456789,c=us,ou=our_ldap,o=acme.com',
- {
- 'uid': [123456789],
- 'mail': [u'shaun@acme.com'],
- 'passwordisexpired': [u'false'],
- 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com']
- }
- )
-
- user_api = identity.backends.ldap.UserApi(CONF)
- user_ref = user_api.get('123456789')
- self.assertIs(True, user_ref['enabled'])
-
- @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'simple_bind_s')
- def test_user_api_get_connection_no_user_password(self, mocked_method):
- """Don't bind in case the user and password are blank."""
- # Ensure the username/password are in-fact blank
- self.config_fixture.config(group='ldap', user=None, password=None)
- user_api = identity.backends.ldap.UserApi(CONF)
- user_api.get_connection(user=None, password=None)
- self.assertFalse(mocked_method.called,
- msg='`simple_bind_s` method was unexpectedly called')
-
- @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect')
- def test_chase_referrals_off(self, mocked_fakeldap):
- self.config_fixture.config(
- group='ldap',
- url='fake://memory',
- chase_referrals=False)
- user_api = identity.backends.ldap.UserApi(CONF)
- user_api.get_connection(user=None, password=None)
-
- # The last call_arg should be a dictionary and should contain
- # chase_referrals. Check to make sure the value of chase_referrals
- # is as expected.
- self.assertFalse(mocked_fakeldap.call_args[-1]['chase_referrals'])
-
- @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect')
- def test_chase_referrals_on(self, mocked_fakeldap):
- self.config_fixture.config(
- group='ldap',
- url='fake://memory',
- chase_referrals=True)
- user_api = identity.backends.ldap.UserApi(CONF)
- user_api.get_connection(user=None, password=None)
-
- # The last call_arg should be a dictionary and should contain
- # chase_referrals. Check to make sure the value of chase_referrals
- # is as expected.
- self.assertTrue(mocked_fakeldap.call_args[-1]['chase_referrals'])
-
- @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect')
- def test_debug_level_set(self, mocked_fakeldap):
- level = 12345
- self.config_fixture.config(
- group='ldap',
- url='fake://memory',
- debug_level=level)
- user_api = identity.backends.ldap.UserApi(CONF)
- user_api.get_connection(user=None, password=None)
-
- # The last call_arg should be a dictionary and should contain
- # debug_level. Check to make sure the value of debug_level
- # is as expected.
- self.assertEqual(level, mocked_fakeldap.call_args[-1]['debug_level'])
-
- def test_wrong_ldap_scope(self):
- self.config_fixture.config(group='ldap', query_scope=uuid.uuid4().hex)
- self.assertRaisesRegexp(
- ValueError,
- 'Invalid LDAP scope: %s. *' % CONF.ldap.query_scope,
- identity.backends.ldap.Identity)
-
- def test_wrong_alias_dereferencing(self):
- self.config_fixture.config(group='ldap',
- alias_dereferencing=uuid.uuid4().hex)
- self.assertRaisesRegexp(
- ValueError,
- 'Invalid LDAP deref option: %s\.' % CONF.ldap.alias_dereferencing,
- identity.backends.ldap.Identity)
-
- def test_is_dumb_member(self):
- self.config_fixture.config(group='ldap',
- use_dumb_member=True)
- self.load_backends()
-
- dn = 'cn=dumb,dc=nonexistent'
- self.assertTrue(self.identity_api.driver.user._is_dumb_member(dn))
-
- def test_is_dumb_member_upper_case_keys(self):
- self.config_fixture.config(group='ldap',
- use_dumb_member=True)
- self.load_backends()
-
- dn = 'CN=dumb,DC=nonexistent'
- self.assertTrue(self.identity_api.driver.user._is_dumb_member(dn))
-
- def test_is_dumb_member_with_false_use_dumb_member(self):
- self.config_fixture.config(group='ldap',
- use_dumb_member=False)
- self.load_backends()
- dn = 'cn=dumb,dc=nonexistent'
- self.assertFalse(self.identity_api.driver.user._is_dumb_member(dn))
-
- def test_is_dumb_member_not_dumb(self):
- self.config_fixture.config(group='ldap',
- use_dumb_member=True)
- self.load_backends()
- dn = 'ou=some,dc=example.com'
- self.assertFalse(self.identity_api.driver.user._is_dumb_member(dn))
-
- def test_user_extra_attribute_mapping(self):
- self.config_fixture.config(
- group='ldap',
- user_additional_attribute_mapping=['description:name'])
- self.load_backends()
- user = self.new_user_ref(name='EXTRA_ATTRIBUTES',
- password='extra',
- domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- dn, attrs = self.identity_api.driver.user._ldap_get(user['id'])
- self.assertThat([user['name']], matchers.Equals(attrs['description']))
-
- def test_user_description_attribute_mapping(self):
- self.config_fixture.config(
- group='ldap',
- user_description_attribute='displayName')
- self.load_backends()
-
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id,
- displayName=uuid.uuid4().hex)
- description = user['displayName']
- user = self.identity_api.create_user(user)
- res = self.identity_api.driver.user.get_all()
-
- new_user = [u for u in res if u['id'] == user['id']][0]
- self.assertThat(new_user['description'], matchers.Equals(description))
-
- def test_user_extra_attribute_mapping_description_is_returned(self):
- # Given a mapping like description:description, the description is
- # returned.
-
- self.config_fixture.config(
- group='ldap',
- user_additional_attribute_mapping=['description:description'])
- self.load_backends()
-
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id,
- description=uuid.uuid4().hex)
- description = user['description']
- user = self.identity_api.create_user(user)
- res = self.identity_api.driver.user.get_all()
-
- new_user = [u for u in res if u['id'] == user['id']][0]
- self.assertThat(new_user['description'], matchers.Equals(description))
-
- def test_user_with_missing_id(self):
- # create a user that doesn't have the id attribute
- ldap_ = self.identity_api.driver.user.get_connection()
- # `sn` is used for the attribute in the DN because it's allowed by
- # the entry's objectclasses so that this test could conceivably run in
- # the live tests.
- ldap_id_field = 'sn'
- ldap_id_value = uuid.uuid4().hex
- dn = '%s=%s,ou=Users,cn=example,cn=com' % (ldap_id_field,
- ldap_id_value)
- modlist = [('objectClass', ['person', 'inetOrgPerson']),
- (ldap_id_field, [ldap_id_value]),
- ('mail', ['email@example.com']),
- ('userPassword', [uuid.uuid4().hex])]
- ldap_.add_s(dn, modlist)
-
- # make sure the user doesn't break other users
- users = self.identity_api.driver.user.get_all()
- self.assertThat(users, matchers.HasLength(len(default_fixtures.USERS)))
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_user_mixed_case_attribute(self, mock_ldap_get):
- # Mock the search results to return attribute names
- # with unexpected case.
- mock_ldap_get.return_value = (
- 'cn=junk,dc=example,dc=com',
- {
- 'sN': [uuid.uuid4().hex],
- 'MaIl': [uuid.uuid4().hex],
- 'cn': ['junk']
- }
- )
- user = self.identity_api.get_user('junk')
- self.assertEqual(mock_ldap_get.return_value[1]['sN'][0],
- user['name'])
- self.assertEqual(mock_ldap_get.return_value[1]['MaIl'][0],
- user['email'])
-
- def test_parse_extra_attribute_mapping(self):
- option_list = ['description:name', 'gecos:password',
- 'fake:invalid', 'invalid1', 'invalid2:',
- 'description:name:something']
- mapping = self.identity_api.driver.user._parse_extra_attrs(option_list)
- expected_dict = {'description': 'name', 'gecos': 'password',
- 'fake': 'invalid', 'invalid2': ''}
- self.assertDictEqual(expected_dict, mapping)
-
- def test_create_domain(self):
- domain = unit.new_domain_ref()
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_domain,
- domain['id'],
- domain)
-
- @unit.skip_if_no_multiple_domains_support
- def test_create_domain_case_sensitivity(self):
- # domains are read-only, so case sensitivity isn't an issue
- ref = unit.new_domain_ref()
- self.assertRaises(exception.Forbidden,
- self.resource_api.create_domain,
- ref['id'],
- ref)
-
- def test_cache_layer_domain_crud(self):
- # TODO(morganfainberg): This also needs to be removed when full LDAP
- # implementation is submitted. No need to duplicate the above test,
- # just skip this time.
- self.skipTest('Domains are read-only against LDAP')
-
- def test_domain_rename_invalidates_get_domain_by_name_cache(self):
- parent = super(LDAPIdentity, self)
- self.assertRaises(
- exception.Forbidden,
- parent.test_domain_rename_invalidates_get_domain_by_name_cache)
-
- def test_project_rename_invalidates_get_project_by_name_cache(self):
- parent = super(LDAPIdentity, self)
- self.assertRaises(
- exception.Forbidden,
- parent.test_project_rename_invalidates_get_project_by_name_cache)
-
- def test_project_crud(self):
- # NOTE(topol): LDAP implementation does not currently support the
- # updating of a project name so this method override
- # provides a different update test
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
-
- project = self.resource_api.create_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
-
- self.assertDictEqual(project, project_ref)
-
- project['description'] = uuid.uuid4().hex
- self.resource_api.update_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project, project_ref)
-
- self.resource_api.delete_project(project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
-
- @unit.skip_if_cache_disabled('assignment')
- def test_cache_layer_project_crud(self):
- # NOTE(morganfainberg): LDAP implementation does not currently support
- # updating project names. This method override provides a different
- # update test.
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project_id = project['id']
- # Create a project
- project = self.resource_api.create_project(project_id, project)
- self.resource_api.get_project(project_id)
- updated_project = copy.deepcopy(project)
- updated_project['description'] = uuid.uuid4().hex
- # Update project, bypassing resource manager
- self.resource_api.driver.update_project(project_id,
- updated_project)
- # Verify get_project still returns the original project_ref
- self.assertDictContainsSubset(
- project, self.resource_api.get_project(project_id))
- # Invalidate cache
- self.resource_api.get_project.invalidate(self.resource_api,
- project_id)
- # Verify get_project now returns the new project
- self.assertDictContainsSubset(
- updated_project,
- self.resource_api.get_project(project_id))
- # Update project using the resource_api manager back to original
- self.resource_api.update_project(project['id'], project)
- # Verify get_project returns the original project_ref
- self.assertDictContainsSubset(
- project, self.resource_api.get_project(project_id))
- # Delete project bypassing resource_api
- self.resource_api.driver.delete_project(project_id)
- # Verify get_project still returns the project_ref
- self.assertDictContainsSubset(
- project, self.resource_api.get_project(project_id))
- # Invalidate cache
- self.resource_api.get_project.invalidate(self.resource_api,
- project_id)
- # Verify ProjectNotFound now raised
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project_id)
- # recreate project
- self.resource_api.create_project(project_id, project)
- self.resource_api.get_project(project_id)
- # delete project
- self.resource_api.delete_project(project_id)
- # Verify ProjectNotFound is raised
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project_id)
-
- def test_update_is_domain_field(self):
- domain = self._get_domain_fixture()
- project = unit.new_project_ref(domain_id=domain['id'])
- project = self.resource_api.create_project(project['id'], project)
-
- # Try to update the is_domain field to True
- project['is_domain'] = True
- self.assertRaises(exception.ValidationError,
- self.resource_api.update_project,
- project['id'], project)
-
- def test_delete_is_domain_project(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_create_domain_under_regular_project_hierarchy_fails(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_create_not_is_domain_project_under_is_domain_hierarchy(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_create_project_passing_is_domain_flag_true(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_create_project_with_parent_id_and_without_domain_id(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_check_leaf_projects(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_list_projects_in_subtree(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_list_projects_in_subtree_with_circular_reference(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_list_project_parents(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_update_project_enabled_cascade(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_cannot_enable_cascade_with_parent_disabled(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_hierarchical_projects_crud(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_create_project_under_disabled_one(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_create_project_with_invalid_parent(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_create_leaf_project_with_invalid_domain(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_update_project_parent(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_enable_project_with_disabled_parent(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_disable_hierarchical_leaf_project(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_disable_hierarchical_not_leaf_project(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_delete_hierarchical_leaf_project(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_delete_hierarchical_not_leaf_project(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_check_hierarchy_depth(self):
- self.skipTest('Resource LDAP has been removed')
-
- def test_multi_role_grant_by_user_group_on_project_domain(self):
- # This is a partial implementation of the standard test that
- # is defined in unit.assignment.test_backends.py. It omits
- # both domain and group grants, since neither of these are
- # yet supported by the ldap backend.
-
- role_list = []
- for _ in range(2):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user1 = self.identity_api.create_user(user1)
- project1 = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(project1['id'], project1)
-
- self.assignment_api.add_role_to_user_and_project(
- user_id=user1['id'],
- tenant_id=project1['id'],
- role_id=role_list[0]['id'])
- self.assignment_api.add_role_to_user_and_project(
- user_id=user1['id'],
- tenant_id=project1['id'],
- role_id=role_list[1]['id'])
-
- # Although list_grants are not yet supported, we can test the
- # alternate way of getting back lists of grants, where user
- # and group roles are combined. Only directly assigned user
- # roles are available, since group grants are not yet supported
-
- combined_list = self.assignment_api.get_roles_for_user_and_project(
- user1['id'],
- project1['id'])
- self.assertEqual(2, len(combined_list))
- self.assertIn(role_list[0]['id'], combined_list)
- self.assertIn(role_list[1]['id'], combined_list)
-
- # Finally, although domain roles are not implemented, check we can
- # issue the combined get roles call with benign results, since this is
- # used in token generation
-
- combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
- user1['id'], CONF.identity.default_domain_id)
- self.assertEqual(0, len(combined_role_list))
-
- def test_list_projects_for_alternate_domain(self):
- self.skipTest(
- 'N/A: LDAP does not support multiple domains')
-
- def test_get_default_domain_by_name(self):
- domain = self._get_domain_fixture()
-
- domain_ref = self.resource_api.get_domain_by_name(domain['name'])
- self.assertEqual(domain_ref, domain)
-
- def test_base_ldap_connection_deref_option(self):
- def get_conn(deref_name):
- self.config_fixture.config(group='ldap',
- alias_dereferencing=deref_name)
- base_ldap = common_ldap.BaseLdap(CONF)
- return base_ldap.get_connection()
-
- conn = get_conn('default')
- self.assertEqual(ldap.get_option(ldap.OPT_DEREF),
- conn.get_option(ldap.OPT_DEREF))
-
- conn = get_conn('always')
- self.assertEqual(ldap.DEREF_ALWAYS,
- conn.get_option(ldap.OPT_DEREF))
-
- conn = get_conn('finding')
- self.assertEqual(ldap.DEREF_FINDING,
- conn.get_option(ldap.OPT_DEREF))
-
- conn = get_conn('never')
- self.assertEqual(ldap.DEREF_NEVER,
- conn.get_option(ldap.OPT_DEREF))
-
- conn = get_conn('searching')
- self.assertEqual(ldap.DEREF_SEARCHING,
- conn.get_option(ldap.OPT_DEREF))
-
- def test_list_users_no_dn(self):
- users = self.identity_api.list_users()
- self.assertEqual(len(default_fixtures.USERS), len(users))
- user_ids = set(user['id'] for user in users)
- expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
- for user in default_fixtures.USERS)
- for user_ref in users:
- self.assertNotIn('dn', user_ref)
- self.assertEqual(expected_user_ids, user_ids)
-
- def test_list_groups_no_dn(self):
- # Create some test groups.
- domain = self._get_domain_fixture()
- expected_group_ids = []
- numgroups = 3
- for _ in range(numgroups):
- group = unit.new_group_ref(domain_id=domain['id'])
- group = self.identity_api.create_group(group)
- expected_group_ids.append(group['id'])
- # Fetch the test groups and ensure that they don't contain a dn.
- groups = self.identity_api.list_groups()
- self.assertEqual(numgroups, len(groups))
- group_ids = set(group['id'] for group in groups)
- for group_ref in groups:
- self.assertNotIn('dn', group_ref)
- self.assertEqual(set(expected_group_ids), group_ids)
-
- def test_list_groups_for_user_no_dn(self):
- # Create a test user.
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- # Create some test groups and add the test user as a member.
- domain = self._get_domain_fixture()
- expected_group_ids = []
- numgroups = 3
- for _ in range(numgroups):
- group = unit.new_group_ref(domain_id=domain['id'])
- group = self.identity_api.create_group(group)
- expected_group_ids.append(group['id'])
- self.identity_api.add_user_to_group(user['id'], group['id'])
- # Fetch the groups for the test user
- # and ensure they don't contain a dn.
- groups = self.identity_api.list_groups_for_user(user['id'])
- self.assertEqual(numgroups, len(groups))
- group_ids = set(group['id'] for group in groups)
- for group_ref in groups:
- self.assertNotIn('dn', group_ref)
- self.assertEqual(set(expected_group_ids), group_ids)
-
- def test_user_id_attribute_in_create(self):
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.id_attr = 'mail'
-
- user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- user_ref = self.identity_api.get_user(user['id'])
- # 'email' attribute should've been created because it is also being used
- # as user_id
- self.assertEqual(user_ref['id'], user_ref['email'])
-
- def test_user_id_attribute_map(self):
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.id_attr = 'mail'
-
- user_ref = self.identity_api.get_user(self.user_foo['email'])
- # the user_id_attribute map should be honored, which means
- # user_ref['id'] should contain the email attribute
- self.assertEqual(self.user_foo['email'], user_ref['id'])
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_get_id_from_dn_for_multivalued_attribute_id(self, mock_ldap_get):
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.id_attr = 'mail'
-
- # make 'email' multivalued so we can test the error condition
- email1 = uuid.uuid4().hex
- email2 = uuid.uuid4().hex
- mock_ldap_get.return_value = (
- 'cn=nobodycares,dc=example,dc=com',
- {
- 'sn': [uuid.uuid4().hex],
- 'mail': [email1, email2],
- 'cn': 'nobodycares'
- }
- )
-
- user_ref = self.identity_api.get_user(email1)
- # make sure we get the ID from DN (old behavior) if the ID attribute
- # has multiple values
- self.assertEqual('nobodycares', user_ref['id'])
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_id_attribute_not_found(self, mock_ldap_get):
- mock_ldap_get.return_value = (
- 'cn=nobodycares,dc=example,dc=com',
- {
- 'sn': [uuid.uuid4().hex],
- }
- )
-
- user_api = identity.backends.ldap.UserApi(CONF)
- self.assertRaises(exception.NotFound,
- user_api.get,
- 'nobodycares')
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_user_id_not_in_dn(self, mock_ldap_get):
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.id_attr = 'uid'
- driver.user.attribute_mapping['name'] = 'cn'
-
- mock_ldap_get.return_value = (
- 'foo=bar,dc=example,dc=com',
- {
- 'sn': [uuid.uuid4().hex],
- 'foo': ['bar'],
- 'cn': ['junk'],
- 'uid': ['crap']
- }
- )
- user_ref = self.identity_api.get_user('crap')
- self.assertEqual('crap', user_ref['id'])
- self.assertEqual('junk', user_ref['name'])
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_user_name_in_dn(self, mock_ldap_get):
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.id_attr = 'SAMAccountName'
- driver.user.attribute_mapping['name'] = 'cn'
-
- mock_ldap_get.return_value = (
- 'cn=Foo Bar,dc=example,dc=com',
- {
- 'sn': [uuid.uuid4().hex],
- 'cn': ['Foo Bar'],
- 'SAMAccountName': ['crap']
- }
- )
- user_ref = self.identity_api.get_user('crap')
- self.assertEqual('crap', user_ref['id'])
- self.assertEqual('Foo Bar', user_ref['name'])
-
-
-class LDAPLimitTests(unit.TestCase, identity_tests.LimitTests):
- def setUp(self):
- super(LDAPLimitTests, self).setUp()
-
- self.useFixture(ldapdb.LDAPDatabase())
- self.useFixture(database.Database(self.sql_driver_version_overrides))
- self.load_backends()
- self.load_fixtures(default_fixtures)
- identity_tests.LimitTests.setUp(self)
- _assert_backends(self,
- assignment='sql',
- identity='ldap',
- resource='sql')
-
- def config_overrides(self):
- super(LDAPLimitTests, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
- self.config_fixture.config(group='identity',
- list_limit=len(default_fixtures.USERS) - 1)
-
- def config_files(self):
- config_files = super(LDAPLimitTests, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
- return config_files
-
- def test_list_projects_filtered_and_limited(self):
- self.skipTest("ldap for storing projects is deprecated")
-
-
-class LDAPIdentityEnabledEmulation(LDAPIdentity):
- def setUp(self):
- super(LDAPIdentityEnabledEmulation, self).setUp()
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
- for obj in [self.tenant_bar, self.tenant_baz, self.user_foo,
- self.user_two, self.user_badguy]:
- obj.setdefault('enabled', True)
- _assert_backends(self, identity='ldap')
-
- def load_fixtures(self, fixtures):
- # Override super impl since need to create group container.
- create_group_container(self.identity_api)
- super(LDAPIdentity, self).load_fixtures(fixtures)
-
- def config_files(self):
- config_files = super(LDAPIdentityEnabledEmulation, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
- return config_files
-
- def config_overrides(self):
- super(LDAPIdentityEnabledEmulation, self).config_overrides()
- self.config_fixture.config(group='ldap',
- user_enabled_emulation=True)
-
- def test_project_crud(self):
- # NOTE(topol): LDAPIdentityEnabledEmulation will create an
- # enabled key in the project dictionary so this
- # method override handles this side-effect
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
-
- project = self.resource_api.create_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
-
- # self.resource_api.create_project adds an enabled
- # key with a value of True when LDAPIdentityEnabledEmulation
- # is used so we now add this expected key to the project dictionary
- project['enabled'] = True
- self.assertDictEqual(project, project_ref)
-
- project['description'] = uuid.uuid4().hex
- self.resource_api.update_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project, project_ref)
-
- self.resource_api.delete_project(project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
-
- @mock.patch.object(versionutils, 'report_deprecated_feature')
- def test_user_crud(self, mock_deprecator):
- # NOTE(stevemar): As of the Mitaka release, we now check for calls that
- # the LDAP write functionality has been deprecated.
- user_dict = self.new_user_ref(
- domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user_dict)
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("create_user for the LDAP identity backend", args[1])
-
- del user_dict['password']
- user_ref = self.identity_api.get_user(user['id'])
- user_ref_dict = {x: user_ref[x] for x in user_ref}
- self.assertDictContainsSubset(user_dict, user_ref_dict)
-
- user_dict['password'] = uuid.uuid4().hex
- self.identity_api.update_user(user['id'], user_dict)
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("update_user for the LDAP identity backend", args[1])
-
- del user_dict['password']
- user_ref = self.identity_api.get_user(user['id'])
- user_ref_dict = {x: user_ref[x] for x in user_ref}
- self.assertDictContainsSubset(user_dict, user_ref_dict)
-
- self.identity_api.delete_user(user['id'])
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("delete_user for the LDAP identity backend", args[1])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- user['id'])
-
- def test_user_auth_emulated(self):
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.user.enabled_emulation_dn = 'cn=test,dc=test'
- self.identity_api.authenticate(
- context={},
- user_id=self.user_foo['id'],
- password=self.user_foo['password'])
-
- def test_user_enable_attribute_mask(self):
- self.skipTest(
- "Enabled emulation conflicts with enabled mask")
-
- def test_user_enabled_use_group_config(self):
- self.config_fixture.config(
- group='ldap',
- user_enabled_emulation_use_group_config=True,
- group_member_attribute='uniqueMember',
- group_objectclass='groupOfUniqueNames')
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- # Create a user and ensure they are enabled.
- user1 = unit.new_user_ref(enabled=True,
- domain_id=CONF.identity.default_domain_id)
- user_ref = self.identity_api.create_user(user1)
- self.assertIs(True, user_ref['enabled'])
-
- # Get a user and ensure they are enabled.
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(True, user_ref['enabled'])
-
- def test_user_enabled_invert(self):
- self.config_fixture.config(group='ldap', user_enabled_invert=True,
- user_enabled_default=False)
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- user2 = self.new_user_ref(enabled=False,
- domain_id=CONF.identity.default_domain_id)
-
- user3 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
-
- # Ensure that the enabled LDAP attribute is not set for a
- # newly created enabled user.
- user_ref = self.identity_api.create_user(user1)
- self.assertIs(True, user_ref['enabled'])
- self.assertIsNone(self.get_user_enabled_vals(user_ref))
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(True, user_ref['enabled'])
-
- # Ensure that an enabled LDAP attribute is not set for a disabled user.
- user1['enabled'] = False
- user_ref = self.identity_api.update_user(user_ref['id'], user1)
- self.assertIs(False, user_ref['enabled'])
- self.assertIsNone(self.get_user_enabled_vals(user_ref))
-
- # Enable the user and ensure that the LDAP enabled
- # attribute is not set.
- user1['enabled'] = True
- user_ref = self.identity_api.update_user(user_ref['id'], user1)
- self.assertIs(True, user_ref['enabled'])
- self.assertIsNone(self.get_user_enabled_vals(user_ref))
-
- # Ensure that the LDAP enabled attribute is not set for a
- # newly created disabled user.
- user_ref = self.identity_api.create_user(user2)
- self.assertIs(False, user_ref['enabled'])
- self.assertIsNone(self.get_user_enabled_vals(user_ref))
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(False, user_ref['enabled'])
-
- # Ensure that the LDAP enabled attribute is not set for a newly created
- # user when the user_enabled_default setting is used.
- user_ref = self.identity_api.create_user(user3)
- self.assertIs(True, user_ref['enabled'])
- self.assertIsNone(self.get_user_enabled_vals(user_ref))
- user_ref = self.identity_api.get_user(user_ref['id'])
- self.assertIs(True, user_ref['enabled'])
-
- def test_user_enabled_invert_no_enabled_value(self):
- self.skipTest(
- "N/A: Covered by test_user_enabled_invert")
-
- def test_user_enabled_invert_default_str_value(self):
- self.skipTest(
- "N/A: Covered by test_user_enabled_invert")
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
- def test_user_enabled_attribute_handles_utf8(self, mock_ldap_get):
- # Since user_enabled_emulation is enabled in this test, this test will
- # fail since it's using user_enabled_invert.
- self.config_fixture.config(group='ldap', user_enabled_invert=True,
- user_enabled_attribute='passwordisexpired')
- mock_ldap_get.return_value = (
- u'uid=123456789,c=us,ou=our_ldap,o=acme.com',
- {
- 'uid': [123456789],
- 'mail': [u'shaun@acme.com'],
- 'passwordisexpired': [u'false'],
- 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com']
- }
- )
-
- user_api = identity.backends.ldap.UserApi(CONF)
- user_ref = user_api.get('123456789')
- self.assertIs(False, user_ref['enabled'])
-
- def test_escape_member_dn(self):
- # The enabled member DN is properly escaped when querying for enabled
- # user.
-
- object_id = uuid.uuid4().hex
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
-
- # driver.user is the EnabledEmuMixIn implementation used for this test.
- mixin_impl = driver.user
-
- # ) is a special char in a filter and must be escaped.
- sample_dn = 'cn=foo)bar'
- # LDAP requires ) is escaped by being replaced with "\29"
- sample_dn_filter_esc = r'cn=foo\29bar'
-
- # Override the tree_dn, it's used to build the enabled member filter
- mixin_impl.tree_dn = sample_dn
-
- # The filter that _get_enabled is going to build contains the
- # tree_dn, which better be escaped in this case.
- exp_filter = '(%s=%s=%s,%s)' % (
- mixin_impl.member_attribute, mixin_impl.id_attr, object_id,
- sample_dn_filter_esc)
-
- with mixin_impl.get_connection() as conn:
- m = self.useFixture(mockpatch.PatchObject(conn, 'search_s')).mock
- mixin_impl._get_enabled(object_id, conn)
- # The 3rd argument is the DN.
- self.assertEqual(exp_filter, m.call_args[0][2])
-
-
-class LDAPPosixGroupsTest(unit.TestCase):
-
- def setUp(self):
-
- super(LDAPPosixGroupsTest, self).setUp()
-
- self.useFixture(ldapdb.LDAPDatabase())
- self.useFixture(database.Database())
-
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- _assert_backends(self, identity='ldap')
-
- def load_fixtures(self, fixtures):
- # Override super impl since need to create group container.
- create_group_container(self.identity_api)
- super(LDAPPosixGroupsTest, self).load_fixtures(fixtures)
-
- def config_overrides(self):
- super(LDAPPosixGroupsTest, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
- self.config_fixture.config(group='ldap', group_members_are_ids=True,
- group_member_attribute='memberUID')
-
- def config_files(self):
- config_files = super(LDAPPosixGroupsTest, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
- return config_files
-
- def _get_domain_fixture(self):
- """Domains in LDAP are read-only, so just return the static one."""
- return self.resource_api.get_domain(CONF.identity.default_domain_id)
-
- def test_posix_member_id(self):
- domain = self._get_domain_fixture()
- new_group = unit.new_group_ref(domain_id=domain['id'])
- new_group = self.identity_api.create_group(new_group)
- # Make sure we get an empty list back on a new group, not an error.
- user_refs = self.identity_api.list_users_in_group(new_group['id'])
- self.assertEqual([], user_refs)
- # Make sure we get the correct users back once they have been added
- # to the group.
- new_user = unit.new_user_ref(domain_id=domain['id'])
- new_user = self.identity_api.create_user(new_user)
-
- # NOTE(amakarov): Create the group directly using LDAP operations
- # rather than going through the manager.
- group_api = self.identity_api.driver.group
- group_ref = group_api.get(new_group['id'])
- mod = (ldap.MOD_ADD, group_api.member_attribute, new_user['id'])
- conn = group_api.get_connection()
- conn.modify_s(group_ref['dn'], [mod])
-
- # Testing the case "the group contains a user"
- user_refs = self.identity_api.list_users_in_group(new_group['id'])
- self.assertIn(new_user['id'], (x['id'] for x in user_refs))
-
- # Testing the case "the user is a member of a group"
- group_refs = self.identity_api.list_groups_for_user(new_user['id'])
- self.assertIn(new_group['id'], (x['id'] for x in group_refs))
-
-
-class LdapIdentityWithMapping(
- BaseLDAPIdentity, unit.SQLDriverOverrides, unit.TestCase):
- """Class to test mapping of default LDAP backend.
-
- The default configuration is not to enable mapping when using a single
- backend LDAP driver. However, a cloud provider might want to enable
- the mapping, hence hiding the LDAP IDs from any clients of keystone.
- Setting backward_compatible_ids to False will enable this mapping.
-
- """
-
- def config_files(self):
- config_files = super(LdapIdentityWithMapping, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap_sql.conf'))
- return config_files
-
- def setUp(self):
- sqldb = self.useFixture(database.Database())
- super(LdapIdentityWithMapping, self).setUp()
- self.ldapdb.clear()
- self.load_backends()
- cache.configure_cache()
-
- sqldb.recreate()
- self.load_fixtures(default_fixtures)
- # defaulted by the data load
- self.user_foo['enabled'] = True
- _assert_backends(self, identity='ldap')
-
- def config_overrides(self):
- super(LdapIdentityWithMapping, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
- self.config_fixture.config(group='identity_mapping',
- backward_compatible_ids=False)
-
- def test_dynamic_mapping_build(self):
- """Test to ensure entities not created via the controller are mapped.
-
- Many LDAP backends will, essentially, be Read Only. In these cases
- the mapping is not built by creating objects, rather from enumerating
- the entries. We test this here by manually deleting the mapping and
- then trying to re-read the entries.
-
- """
- initial_mappings = len(mapping_sql.list_id_mappings())
- user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user1 = self.identity_api.create_user(user1)
- user2 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user2 = self.identity_api.create_user(user2)
- mappings = mapping_sql.list_id_mappings()
- self.assertEqual(initial_mappings + 2, len(mappings))
-
- # Now delete the mappings for the two users above
- self.id_mapping_api.purge_mappings({'public_id': user1['id']})
- self.id_mapping_api.purge_mappings({'public_id': user2['id']})
-
- # We should no longer be able to get these users via their old IDs
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- user1['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- user2['id'])
-
- # Now enumerate all users...this should re-build the mapping, and
- # we should be able to find the users via their original public IDs.
- self.identity_api.list_users()
- self.identity_api.get_user(user1['id'])
- self.identity_api.get_user(user2['id'])
-
- def test_get_roles_for_user_and_project_user_group_same_id(self):
- self.skipTest('N/A: We never generate the same ID for a user and '
- 'group in our mapping table')
-
- def test_list_domains(self):
- domains = self.resource_api.list_domains()
- self.assertEqual([resource.calc_default_domain()], domains)
-
-
-class BaseMultiLDAPandSQLIdentity(object):
- """Mixin class with support methods for domain-specific config testing."""
-
- def create_users_across_domains(self):
- """Create a set of users, each with a role on their own domain."""
- # We also will check that the right number of id mappings get created
- initial_mappings = len(mapping_sql.list_id_mappings())
-
- self.users['user0'] = unit.create_user(
- self.identity_api,
- self.domains['domain_default']['id'])
- self.assignment_api.create_grant(
- user_id=self.users['user0']['id'],
- domain_id=self.domains['domain_default']['id'],
- role_id=self.role_member['id'])
- for x in range(1, self.domain_count):
- self.users['user%s' % x] = unit.create_user(
- self.identity_api,
- self.domains['domain%s' % x]['id'])
- self.assignment_api.create_grant(
- user_id=self.users['user%s' % x]['id'],
- domain_id=self.domains['domain%s' % x]['id'],
- role_id=self.role_member['id'])
-
- # So how many new id mappings should have been created? One for each
- # user created in a domain that is using the non default driver..
- self.assertEqual(initial_mappings + self.domain_specific_count,
- len(mapping_sql.list_id_mappings()))
-
- def check_user(self, user, domain_id, expected_status):
- """Check user is in correct backend.
-
- As part of the tests, we want to force ourselves to manually
- select the driver for a given domain, to make sure the entity
- ended up in the correct backend.
-
- """
- driver = self.identity_api._select_identity_driver(domain_id)
- unused, unused, entity_id = (
- self.identity_api._get_domain_driver_and_entity_id(
- user['id']))
-
- if expected_status == http_client.OK:
- ref = driver.get_user(entity_id)
- ref = self.identity_api._set_domain_id_and_mapping(
- ref, domain_id, driver, map.EntityType.USER)
- user = user.copy()
- del user['password']
- self.assertDictEqual(user, ref)
- else:
- # TODO(henry-nash): Use AssertRaises here, although
- # there appears to be an issue with using driver.get_user
- # inside that construct
- try:
- driver.get_user(entity_id)
- except expected_status:
- pass
-
- def setup_initial_domains(self):
-
- def create_domain(domain):
- try:
- ref = self.resource_api.create_domain(
- domain['id'], domain)
- except exception.Conflict:
- ref = (
- self.resource_api.get_domain_by_name(domain['name']))
- return ref
-
- self.domains = {}
- for x in range(1, self.domain_count):
- domain = 'domain%s' % x
- self.domains[domain] = create_domain(
- {'id': uuid.uuid4().hex, 'name': domain})
- self.domains['domain_default'] = create_domain(
- resource.calc_default_domain())
-
- def test_authenticate_to_each_domain(self):
- """Test that a user in each domain can authenticate."""
- for user_num in range(self.domain_count):
- user = 'user%s' % user_num
- self.identity_api.authenticate(
- context={},
- user_id=self.users[user]['id'],
- password=self.users[user]['password'])
-
-
-class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
- unit.TestCase, BaseMultiLDAPandSQLIdentity):
- """Class to test common SQL plus individual LDAP backends.
-
- We define a set of domains and domain-specific backends:
-
- - A separate LDAP backend for the default domain
- - A separate LDAP backend for domain1
- - domain2 shares the same LDAP as domain1, but uses a different
- tree attach point
- - An SQL backend for all other domains (which will include domain3
- and domain4)
-
- Normally one would expect that the default domain would be handled as
- part of the "other domains" - however the above provides better
- test coverage since most of the existing backend tests use the default
- domain.
-
- """
-
- def setUp(self):
- sqldb = self.useFixture(database.Database())
- super(MultiLDAPandSQLIdentity, self).setUp()
-
- self.load_backends()
- sqldb.recreate()
-
- self.domain_count = 5
- self.domain_specific_count = 3
- self.setup_initial_domains()
- self._setup_initial_users()
-
- # All initial test data setup complete, time to switch on support
- # for separate backends per domain.
- self.enable_multi_domain()
-
- self.ldapdb.clear()
- self.load_fixtures(default_fixtures)
- self.create_users_across_domains()
- self.assert_backends()
-
- def assert_backends(self):
- _assert_backends(self,
- assignment='sql',
- identity={
- None: 'sql',
- self.domains['domain_default']['id']: 'ldap',
- self.domains['domain1']['id']: 'ldap',
- self.domains['domain2']['id']: 'ldap',
- },
- resource='sql')
-
- def config_overrides(self):
- super(MultiLDAPandSQLIdentity, self).config_overrides()
- # Make sure identity and assignment are actually SQL drivers,
- # BaseLDAPIdentity sets these options to use LDAP.
- self.config_fixture.config(group='identity', driver='sql')
- self.config_fixture.config(group='resource', driver='sql')
- self.config_fixture.config(group='assignment', driver='sql')
-
- def _setup_initial_users(self):
- # Create some identity entities BEFORE we switch to multi-backend, so
- # we can test that these are still accessible
- self.users = {}
- self.users['userA'] = unit.create_user(
- self.identity_api,
- self.domains['domain_default']['id'])
- self.users['userB'] = unit.create_user(
- self.identity_api,
- self.domains['domain1']['id'])
- self.users['userC'] = unit.create_user(
- self.identity_api,
- self.domains['domain3']['id'])
-
- def enable_multi_domain(self):
- """Enable the chosen form of multi domain configuration support.
-
- This method enables the file-based configuration support. Child classes
- that wish to use the database domain configuration support should
- override this method and set the appropriate config_fixture option.
-
- """
- self.config_fixture.config(
- group='identity', domain_specific_drivers_enabled=True,
- domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap',
- list_limit=1000)
- self.config_fixture.config(group='identity_mapping',
- backward_compatible_ids=False)
-
- def get_config(self, domain_id):
- # Get the config for this domain, will return CONF
- # if no specific config defined for this domain
- return self.identity_api.domain_configs.get_domain_conf(domain_id)
-
- def test_list_users(self):
- # Override the standard list users, since we have added an extra user
- # to the default domain, so the number of expected users is one more
- # than in the standard test.
- users = self.identity_api.list_users(
- domain_scope=self._set_domain_scope(
- CONF.identity.default_domain_id))
- self.assertEqual(len(default_fixtures.USERS) + 1, len(users))
- user_ids = set(user['id'] for user in users)
- expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
- for user in default_fixtures.USERS)
- expected_user_ids.add(self.users['user0']['id'])
- for user_ref in users:
- self.assertNotIn('password', user_ref)
- self.assertEqual(expected_user_ids, user_ids)
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get_all')
- def test_list_limit_domain_specific_inheritance(self, ldap_get_all):
- # passing hints is important, because if it's not passed, limiting
- # is considered to be disabled
- hints = driver_hints.Hints()
- self.identity_api.list_users(
- domain_scope=self.domains['domain2']['id'],
- hints=hints)
- # since list_limit is not specified in keystone.domain2.conf, it should
- # take the default, which is 1000
- self.assertTrue(ldap_get_all.called)
- args, kwargs = ldap_get_all.call_args
- hints = args[0]
- self.assertEqual(1000, hints.limit['limit'])
-
- @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get_all')
- def test_list_limit_domain_specific_override(self, ldap_get_all):
- # passing hints is important, because if it's not passed, limiting
- # is considered to be disabled
- hints = driver_hints.Hints()
- self.identity_api.list_users(
- domain_scope=self.domains['domain1']['id'],
- hints=hints)
- # this should have the list_limit set in Keystone.domain1.conf, which
- # is 101
- self.assertTrue(ldap_get_all.called)
- args, kwargs = ldap_get_all.call_args
- hints = args[0]
- self.assertEqual(101, hints.limit['limit'])
-
- def test_domain_segregation(self):
- """Test that separate configs have segregated the domain.
-
- Test Plan:
-
- - Users were created in each domain as part of setup, now make sure
- you can only find a given user in its relevant domain/backend
- - Make sure that for a backend that supports multiple domains
- you can get the users via any of its domains
-
- """
- # Check that I can read a user with the appropriate domain-selected
- # driver, but won't find it via any other domain driver
-
- check_user = self.check_user
- check_user(self.users['user0'],
- self.domains['domain_default']['id'], http_client.OK)
- for domain in [self.domains['domain1']['id'],
- self.domains['domain2']['id'],
- self.domains['domain3']['id'],
- self.domains['domain4']['id']]:
- check_user(self.users['user0'], domain, exception.UserNotFound)
-
- check_user(self.users['user1'], self.domains['domain1']['id'],
- http_client.OK)
- for domain in [self.domains['domain_default']['id'],
- self.domains['domain2']['id'],
- self.domains['domain3']['id'],
- self.domains['domain4']['id']]:
- check_user(self.users['user1'], domain, exception.UserNotFound)
-
- check_user(self.users['user2'], self.domains['domain2']['id'],
- http_client.OK)
- for domain in [self.domains['domain_default']['id'],
- self.domains['domain1']['id'],
- self.domains['domain3']['id'],
- self.domains['domain4']['id']]:
- check_user(self.users['user2'], domain, exception.UserNotFound)
-
- # domain3 and domain4 share the same backend, so you should be
- # able to see user3 and user4 from either.
-
- check_user(self.users['user3'], self.domains['domain3']['id'],
- http_client.OK)
- check_user(self.users['user3'], self.domains['domain4']['id'],
- http_client.OK)
- check_user(self.users['user4'], self.domains['domain3']['id'],
- http_client.OK)
- check_user(self.users['user4'], self.domains['domain4']['id'],
- http_client.OK)
-
- for domain in [self.domains['domain_default']['id'],
- self.domains['domain1']['id'],
- self.domains['domain2']['id']]:
- check_user(self.users['user3'], domain, exception.UserNotFound)
- check_user(self.users['user4'], domain, exception.UserNotFound)
-
- # Finally, going through the regular manager layer, make sure we
- # only see the right number of users in each of the non-default
- # domains. One might have expected two users in domain1 (since we
- # created one before we switched to multi-backend), however since
- # that domain changed backends in the switch we don't find it anymore.
- # This is as designed - we don't support moving domains between
- # backends.
- #
- # The listing of the default domain is already handled in the
- # test_list_users() method.
- for domain in [self.domains['domain1']['id'],
- self.domains['domain2']['id'],
- self.domains['domain4']['id']]:
- self.assertThat(
- self.identity_api.list_users(domain_scope=domain),
- matchers.HasLength(1))
-
- # domain3 had a user created before we switched on
- # multiple backends, plus one created afterwards - and its
- # backend has not changed - so we should find two.
- self.assertThat(
- self.identity_api.list_users(
- domain_scope=self.domains['domain3']['id']),
- matchers.HasLength(2))
-
- def test_existing_uuids_work(self):
- """Test that 'uni-domain' created IDs still work.
-
- Throwing the switch to domain-specific backends should not cause
- existing identities to be inaccessible via ID.
-
- """
- self.identity_api.get_user(self.users['userA']['id'])
- self.identity_api.get_user(self.users['userB']['id'])
- self.identity_api.get_user(self.users['userC']['id'])
-
- def test_scanning_of_config_dir(self):
- """Test the Manager class scans the config directory.
-
- The setup for the main tests above load the domain configs directly
- so that the test overrides can be included. This test just makes sure
- that the standard config directory scanning does pick up the relevant
- domain config files.
-
- """
- # Confirm that config has drivers_enabled as True, which we will
- # check has been set to False later in this test
- self.assertTrue(CONF.identity.domain_specific_drivers_enabled)
- self.load_backends()
- # Execute any command to trigger the lazy loading of domain configs
- self.identity_api.list_users(
- domain_scope=self.domains['domain1']['id'])
- # ...and now check the domain configs have been set up
- self.assertIn('default', self.identity_api.domain_configs)
- self.assertIn(self.domains['domain1']['id'],
- self.identity_api.domain_configs)
- self.assertIn(self.domains['domain2']['id'],
- self.identity_api.domain_configs)
- self.assertNotIn(self.domains['domain3']['id'],
- self.identity_api.domain_configs)
- self.assertNotIn(self.domains['domain4']['id'],
- self.identity_api.domain_configs)
-
- # Finally check that a domain specific config contains items from both
- # the primary config and the domain specific config
- conf = self.identity_api.domain_configs.get_domain_conf(
- self.domains['domain1']['id'])
- # This should now be false, as is the default, since this is not
- # set in the standard primary config file
- self.assertFalse(conf.identity.domain_specific_drivers_enabled)
- # ..and make sure a domain-specific options is also set
- self.assertEqual('fake://memory1', conf.ldap.url)
-
- def test_delete_domain_with_user_added(self):
- domain = unit.new_domain_ref()
- project = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_domain(domain['id'], domain)
- project = self.resource_api.create_project(project['id'], project)
- project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project, project_ref)
-
- self.assignment_api.create_grant(user_id=self.user_foo['id'],
- project_id=project['id'],
- role_id=self.role_member['id'])
- self.assignment_api.delete_grant(user_id=self.user_foo['id'],
- project_id=project['id'],
- role_id=self.role_member['id'])
- domain['enabled'] = False
- self.resource_api.update_domain(domain['id'], domain)
- self.resource_api.delete_domain(domain['id'])
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
-
- def test_user_enabled_ignored_disable_error(self):
- # Override.
- self.skipTest("Doesn't apply since LDAP config has no effect on the "
- "SQL identity backend.")
-
- def test_group_enabled_ignored_disable_error(self):
- # Override.
- self.skipTest("Doesn't apply since LDAP config has no effect on the "
- "SQL identity backend.")
-
- def test_project_enabled_ignored_disable_error(self):
- # Override
- self.skipTest("Doesn't apply since LDAP configuration is ignored for "
- "SQL assignment backend.")
-
- def test_list_role_assignments_filtered_by_role(self):
- # Domain roles are supported by the SQL Assignment backend
- base = super(BaseLDAPIdentity, self)
- base.test_list_role_assignments_filtered_by_role()
-
- def test_list_role_assignment_by_domain(self):
- # With multi LDAP this method should work, so override the override
- # from BaseLDAPIdentity
- super(BaseLDAPIdentity, self).test_list_role_assignment_by_domain()
-
- def test_list_role_assignment_by_user_with_domain_group_roles(self):
- # With multi LDAP this method should work, so override the override
- # from BaseLDAPIdentity
- super(BaseLDAPIdentity, self).\
- test_list_role_assignment_by_user_with_domain_group_roles()
-
- def test_list_role_assignment_using_sourced_groups_with_domains(self):
- # With SQL Assignment this method should work, so override the override
- # from BaseLDAPIdentity
- base = super(BaseLDAPIdentity, self)
- base.test_list_role_assignment_using_sourced_groups_with_domains()
-
- def test_create_project_with_domain_id_and_without_parent_id(self):
- # With multi LDAP this method should work, so override the override
- # from BaseLDAPIdentity
- super(BaseLDAPIdentity, self).\
- test_create_project_with_domain_id_and_without_parent_id()
-
- def test_create_project_with_domain_id_mismatch_to_parent_domain(self):
- # With multi LDAP this method should work, so override the override
- # from BaseLDAPIdentity
- super(BaseLDAPIdentity, self).\
- test_create_project_with_domain_id_mismatch_to_parent_domain()
-
- def test_remove_foreign_assignments_when_deleting_a_domain(self):
- # With multi LDAP this method should work, so override the override
- # from BaseLDAPIdentity
- base = super(BaseLDAPIdentity, self)
- base.test_remove_foreign_assignments_when_deleting_a_domain()
-
-
-class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
- """Class to test the use of domain configs stored in the database.
-
- Repeat the same tests as MultiLDAPandSQLIdentity, but instead of using the
- domain specific config files, store the domain specific values in the
- database.
-
- """
-
- def assert_backends(self):
- _assert_backends(self,
- assignment='sql',
- identity={
- None: 'sql',
- self.domains['domain_default']['id']: 'ldap',
- self.domains['domain1']['id']: 'ldap',
- self.domains['domain2']['id']: 'ldap',
- },
- resource='sql')
-
- def enable_multi_domain(self):
- # The values below are the same as in the domain_configs_multi_ldap
- # directory of test config_files.
- default_config = {
- 'ldap': {'url': 'fake://memory',
- 'user': 'cn=Admin',
- 'password': 'password',
- 'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'ldap'}
- }
- domain1_config = {
- 'ldap': {'url': 'fake://memory1',
- 'user': 'cn=Admin',
- 'password': 'password',
- 'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'ldap',
- 'list_limit': '101'}
- }
- domain2_config = {
- 'ldap': {'url': 'fake://memory',
- 'user': 'cn=Admin',
- 'password': 'password',
- 'suffix': 'cn=myroot,cn=com',
- 'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org',
- 'user_tree_dn': 'ou=Users,dc=myroot,dc=org'},
- 'identity': {'driver': 'ldap'}
- }
-
- self.domain_config_api.create_config(CONF.identity.default_domain_id,
- default_config)
- self.domain_config_api.create_config(self.domains['domain1']['id'],
- domain1_config)
- self.domain_config_api.create_config(self.domains['domain2']['id'],
- domain2_config)
-
- self.config_fixture.config(
- group='identity', domain_specific_drivers_enabled=True,
- domain_configurations_from_database=True,
- list_limit=1000)
- self.config_fixture.config(group='identity_mapping',
- backward_compatible_ids=False)
-
- def test_domain_config_has_no_impact_if_database_support_disabled(self):
- """Ensure database domain configs have no effect if disabled.
-
- Set reading from database configs to false, restart the backends
- and then try and set and use database configs.
-
- """
- self.config_fixture.config(
- group='identity', domain_configurations_from_database=False)
- self.load_backends()
- new_config = {'ldap': {'url': uuid.uuid4().hex}}
- self.domain_config_api.create_config(
- CONF.identity.default_domain_id, new_config)
- # Trigger the identity backend to initialise any domain specific
- # configurations
- self.identity_api.list_users()
- # Check that the new config has not been passed to the driver for
- # the default domain.
- default_config = (
- self.identity_api.domain_configs.get_domain_conf(
- CONF.identity.default_domain_id))
- self.assertEqual(CONF.ldap.url, default_config.ldap.url)
-
- def test_reloading_domain_config(self):
- """Ensure domain drivers are reloaded on a config modification."""
- domain_cfgs = self.identity_api.domain_configs
-
- # Create a new config for the default domain, hence overwriting the
- # current settings.
- new_config = {
- 'ldap': {'url': uuid.uuid4().hex},
- 'identity': {'driver': 'ldap'}}
- self.domain_config_api.create_config(
- CONF.identity.default_domain_id, new_config)
- default_config = (
- domain_cfgs.get_domain_conf(CONF.identity.default_domain_id))
- self.assertEqual(new_config['ldap']['url'], default_config.ldap.url)
-
- # Ensure updating is also honored
- updated_config = {'url': uuid.uuid4().hex}
- self.domain_config_api.update_config(
- CONF.identity.default_domain_id, updated_config,
- group='ldap', option='url')
- default_config = (
- domain_cfgs.get_domain_conf(CONF.identity.default_domain_id))
- self.assertEqual(updated_config['url'], default_config.ldap.url)
-
- # ...and finally ensure delete causes the driver to get the standard
- # config again.
- self.domain_config_api.delete_config(CONF.identity.default_domain_id)
- default_config = (
- domain_cfgs.get_domain_conf(CONF.identity.default_domain_id))
- self.assertEqual(CONF.ldap.url, default_config.ldap.url)
-
- def test_setting_multiple_sql_driver_raises_exception(self):
- """Ensure setting multiple domain specific sql drivers is prevented."""
- new_config = {'identity': {'driver': 'sql'}}
- self.domain_config_api.create_config(
- CONF.identity.default_domain_id, new_config)
- self.identity_api.domain_configs.get_domain_conf(
- CONF.identity.default_domain_id)
- self.domain_config_api.create_config(self.domains['domain1']['id'],
- new_config)
- self.assertRaises(exception.MultipleSQLDriversInConfig,
- self.identity_api.domain_configs.get_domain_conf,
- self.domains['domain1']['id'])
-
- def test_same_domain_gets_sql_driver(self):
- """Ensure we can set an SQL driver if we have had it before."""
- new_config = {'identity': {'driver': 'sql'}}
- self.domain_config_api.create_config(
- CONF.identity.default_domain_id, new_config)
- self.identity_api.domain_configs.get_domain_conf(
- CONF.identity.default_domain_id)
-
- # By using a slightly different config, we cause the driver to be
- # reloaded...and hence check if we can reuse the sql driver
- new_config = {'identity': {'driver': 'sql'},
- 'ldap': {'url': 'fake://memory1'}}
- self.domain_config_api.create_config(
- CONF.identity.default_domain_id, new_config)
- self.identity_api.domain_configs.get_domain_conf(
- CONF.identity.default_domain_id)
-
- def test_delete_domain_clears_sql_registration(self):
- """Ensure registration is deleted when a domain is deleted."""
- domain = unit.new_domain_ref()
- domain = self.resource_api.create_domain(domain['id'], domain)
- new_config = {'identity': {'driver': 'sql'}}
- self.domain_config_api.create_config(domain['id'], new_config)
- self.identity_api.domain_configs.get_domain_conf(domain['id'])
-
- # First show that trying to set SQL for another driver fails
- self.domain_config_api.create_config(self.domains['domain1']['id'],
- new_config)
- self.assertRaises(exception.MultipleSQLDriversInConfig,
- self.identity_api.domain_configs.get_domain_conf,
- self.domains['domain1']['id'])
- self.domain_config_api.delete_config(self.domains['domain1']['id'])
-
- # Now we delete the domain
- domain['enabled'] = False
- self.resource_api.update_domain(domain['id'], domain)
- self.resource_api.delete_domain(domain['id'])
-
- # The registration should now be available
- self.domain_config_api.create_config(self.domains['domain1']['id'],
- new_config)
- self.identity_api.domain_configs.get_domain_conf(
- self.domains['domain1']['id'])
-
- def test_orphaned_registration_does_not_prevent_getting_sql_driver(self):
- """Ensure we self heal an orphaned sql registration."""
- domain = unit.new_domain_ref()
- domain = self.resource_api.create_domain(domain['id'], domain)
- new_config = {'identity': {'driver': 'sql'}}
- self.domain_config_api.create_config(domain['id'], new_config)
- self.identity_api.domain_configs.get_domain_conf(domain['id'])
-
- # First show that trying to set SQL for another driver fails
- self.domain_config_api.create_config(self.domains['domain1']['id'],
- new_config)
- self.assertRaises(exception.MultipleSQLDriversInConfig,
- self.identity_api.domain_configs.get_domain_conf,
- self.domains['domain1']['id'])
-
- # Now we delete the domain by using the backend driver directly,
- # which causes the domain to be deleted without any of the cleanup
- # that is in the manager (this is simulating a server process crashing
- # in the middle of a delete domain operation, and somehow leaving the
- # domain config settings in place, but the domain is deleted). We
- # should still be able to set another domain to SQL, since we should
- # self heal this issue.
-
- self.resource_api.driver.delete_project(domain['id'])
- # Invalidate cache (so we will see the domain has gone)
- self.resource_api.get_domain.invalidate(
- self.resource_api, domain['id'])
-
- # The registration should now be available
- self.domain_config_api.create_config(self.domains['domain1']['id'],
- new_config)
- self.identity_api.domain_configs.get_domain_conf(
- self.domains['domain1']['id'])
-
-
-class DomainSpecificLDAPandSQLIdentity(
- BaseLDAPIdentity, unit.SQLDriverOverrides, unit.TestCase,
- BaseMultiLDAPandSQLIdentity):
- """Class to test when all domains use specific configs, including SQL.
-
- We define a set of domains and domain-specific backends:
-
- - A separate LDAP backend for the default domain
- - A separate SQL backend for domain1
-
- Although the default driver still exists, we don't use it.
-
- """
-
- def setUp(self):
- sqldb = self.useFixture(database.Database())
- super(DomainSpecificLDAPandSQLIdentity, self).setUp()
- self.initial_setup(sqldb)
-
- def initial_setup(self, sqldb):
- # We aren't setting up any initial data ahead of switching to
- # domain-specific operation, so make the switch straight away.
- self.config_fixture.config(
- group='identity', domain_specific_drivers_enabled=True,
- domain_config_dir=(
- unit.TESTCONF + '/domain_configs_one_sql_one_ldap'))
- self.config_fixture.config(group='identity_mapping',
- backward_compatible_ids=False)
-
- self.load_backends()
- sqldb.recreate()
-
- self.domain_count = 2
- self.domain_specific_count = 2
- self.setup_initial_domains()
- self.users = {}
-
- self.ldapdb.clear()
- self.load_fixtures(default_fixtures)
- self.create_users_across_domains()
-
- _assert_backends(
- self,
- assignment='sql',
- identity={
- None: 'ldap',
- 'default': 'ldap',
- self.domains['domain1']['id']: 'sql',
- },
- resource='sql')
-
- def config_overrides(self):
- super(DomainSpecificLDAPandSQLIdentity, self).config_overrides()
- # Make sure resource & assignment are actually SQL drivers,
- # BaseLDAPIdentity causes this option to use LDAP.
- self.config_fixture.config(group='resource', driver='sql')
- self.config_fixture.config(group='assignment', driver='sql')
-
- def get_config(self, domain_id):
- # Get the config for this domain, will return CONF
- # if no specific config defined for this domain
- return self.identity_api.domain_configs.get_domain_conf(domain_id)
-
- def test_list_domains(self):
- self.skipTest(
- 'N/A: Not relevant for multi ldap testing')
-
- def test_list_domains_non_default_domain_id(self):
- self.skipTest(
- 'N/A: Not relevant for multi ldap testing')
-
- def test_domain_crud(self):
- self.skipTest(
- 'N/A: Not relevant for multi ldap testing')
-
- def test_not_delete_domain_with_enabled_subdomains(self):
- self.skipTest(
- 'N/A: Not relevant for multi ldap testing')
-
- def test_delete_domain(self):
- # With this restricted multi LDAP class, tests that use multiple
- # domains and identity, are still not supported
- self.assertRaises(
- exception.DomainNotFound,
- super(BaseLDAPIdentity, self).test_delete_domain_with_project_api)
-
- def test_list_users(self):
- # Override the standard list users, since we have added an extra user
- # to the default domain, so the number of expected users is one more
- # than in the standard test.
- users = self.identity_api.list_users(
- domain_scope=self._set_domain_scope(
- CONF.identity.default_domain_id))
- self.assertEqual(len(default_fixtures.USERS) + 1, len(users))
- user_ids = set(user['id'] for user in users)
- expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
- for user in default_fixtures.USERS)
- expected_user_ids.add(self.users['user0']['id'])
- for user_ref in users:
- self.assertNotIn('password', user_ref)
- self.assertEqual(expected_user_ids, user_ids)
-
- def test_domain_segregation(self):
- """Test that separate configs have segregated the domain.
-
- Test Plan:
-
- - Users were created in each domain as part of setup, now make sure
- you can only find a given user in its relevant domain/backend
- - Make sure that for a backend that supports multiple domains
- you can get the users via any of its domains
-
- """
- # Check that I can read a user with the appropriate domain-selected
- # driver, but won't find it via any other domain driver
-
- self.check_user(self.users['user0'],
- self.domains['domain_default']['id'], http_client.OK)
- self.check_user(self.users['user0'],
- self.domains['domain1']['id'], exception.UserNotFound)
-
- self.check_user(self.users['user1'],
- self.domains['domain1']['id'], http_client.OK)
- self.check_user(self.users['user1'],
- self.domains['domain_default']['id'],
- exception.UserNotFound)
-
- # Finally, going through the regular manager layer, make sure we
- # only see the right number of users in the non-default domain.
-
- self.assertThat(
- self.identity_api.list_users(
- domain_scope=self.domains['domain1']['id']),
- matchers.HasLength(1))
-
- def test_add_role_grant_to_user_and_project_returns_not_found(self):
- self.skipTest('Blocked by bug 1101287')
-
- def test_get_role_grants_for_user_and_project_returns_not_found(self):
- self.skipTest('Blocked by bug 1101287')
-
- def test_list_projects_for_user_with_grants(self):
- self.skipTest('Blocked by bug 1221805')
-
- def test_get_roles_for_user_and_project_user_group_same_id(self):
- self.skipTest('N/A: We never generate the same ID for a user and '
- 'group in our mapping table')
-
- def test_user_id_comma(self):
- self.skipTest('Only valid if it is guaranteed to be talking to '
- 'the fakeldap backend')
-
- def test_user_id_comma_grants(self):
- self.skipTest('Only valid if it is guaranteed to be talking to '
- 'the fakeldap backend')
-
- def test_user_enabled_ignored_disable_error(self):
- # Override.
- self.skipTest("Doesn't apply since LDAP config has no affect on the "
- "SQL identity backend.")
-
- def test_group_enabled_ignored_disable_error(self):
- # Override.
- self.skipTest("Doesn't apply since LDAP config has no affect on the "
- "SQL identity backend.")
-
- def test_project_enabled_ignored_disable_error(self):
- # Override
- self.skipTest("Doesn't apply since LDAP configuration is ignored for "
- "SQL assignment backend.")
-
- def test_list_role_assignments_filtered_by_role(self):
- # Domain roles are supported by the SQL Assignment backend
- base = super(BaseLDAPIdentity, self)
- base.test_list_role_assignments_filtered_by_role()
-
- def test_delete_domain_with_project_api(self):
- # With this restricted multi LDAP class, tests that use multiple
- # domains and identity, are still not supported
- self.assertRaises(
- exception.DomainNotFound,
- super(BaseLDAPIdentity, self).test_delete_domain_with_project_api)
-
- def test_create_project_with_domain_id_and_without_parent_id(self):
- # With restricted multi LDAP, tests that don't use identity, but do
- # required aditional domains will work
- base = super(BaseLDAPIdentity, self)
- base.test_create_project_with_domain_id_and_without_parent_id()
-
- def test_create_project_with_domain_id_mismatch_to_parent_domain(self):
- # With restricted multi LDAP, tests that don't use identity, but do
- # required aditional domains will work
- base = super(BaseLDAPIdentity, self)
- base.test_create_project_with_domain_id_mismatch_to_parent_domain()
-
-
-class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
- """Class to test simplest use of domain-specific SQL driver.
-
- The simplest use of an SQL domain-specific backend is when it is used to
- augment the standard case when LDAP is the default driver defined in the
- main config file. This would allow, for example, service users to be
- stored in SQL while LDAP handles the rest. Hence we define:
-
- - The default driver uses the LDAP backend for the default domain
- - A separate SQL backend for domain1
-
- """
-
- def initial_setup(self, sqldb):
- # We aren't setting up any initial data ahead of switching to
- # domain-specific operation, so make the switch straight away.
- self.config_fixture.config(
- group='identity', domain_specific_drivers_enabled=True,
- domain_config_dir=(
- unit.TESTCONF + '/domain_configs_default_ldap_one_sql'))
- # Part of the testing counts how many new mappings get created as
- # we create users, so ensure we are NOT using mapping for the default
- # LDAP domain so this doesn't confuse the calculation.
- self.config_fixture.config(group='identity_mapping',
- backward_compatible_ids=True)
-
- self.load_backends()
- sqldb.recreate()
-
- self.domain_count = 2
- self.domain_specific_count = 1
- self.setup_initial_domains()
- self.users = {}
-
- self.load_fixtures(default_fixtures)
- self.create_users_across_domains()
-
- _assert_backends(self,
- assignment='sql',
- identity='ldap',
- resource='sql')
-
- def config_overrides(self):
- super(DomainSpecificSQLIdentity, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
- self.config_fixture.config(group='resource', driver='sql')
- self.config_fixture.config(group='assignment', driver='sql')
-
- def get_config(self, domain_id):
- if domain_id == CONF.identity.default_domain_id:
- return CONF
- else:
- return self.identity_api.domain_configs.get_domain_conf(domain_id)
-
- def test_default_sql_plus_sql_specific_driver_fails(self):
- # First confirm that if ldap is default driver, domain1 can be
- # loaded as sql
- self.config_fixture.config(group='identity', driver='ldap')
- self.config_fixture.config(group='assignment', driver='sql')
- self.load_backends()
- # Make any identity call to initiate the lazy loading of configs
- self.identity_api.list_users(
- domain_scope=CONF.identity.default_domain_id)
- self.assertIsNotNone(self.get_config(self.domains['domain1']['id']))
-
- # Now re-initialize, but with sql as the identity driver
- self.config_fixture.config(group='identity', driver='sql')
- self.config_fixture.config(group='assignment', driver='sql')
- self.load_backends()
- # Make any identity call to initiate the lazy loading of configs, which
- # should fail since we would now have two sql drivers.
- self.assertRaises(exception.MultipleSQLDriversInConfig,
- self.identity_api.list_users,
- domain_scope=CONF.identity.default_domain_id)
-
- def test_multiple_sql_specific_drivers_fails(self):
- self.config_fixture.config(group='identity', driver='ldap')
- self.config_fixture.config(group='assignment', driver='sql')
- self.load_backends()
- # Ensure default, domain1 and domain2 exist
- self.domain_count = 3
- self.setup_initial_domains()
- # Make any identity call to initiate the lazy loading of configs
- self.identity_api.list_users(
- domain_scope=CONF.identity.default_domain_id)
- # This will only load domain1, since the domain2 config file is
- # not stored in the same location
- self.assertIsNotNone(self.get_config(self.domains['domain1']['id']))
-
- # Now try and manually load a 2nd sql specific driver, for domain2,
- # which should fail.
- self.assertRaises(
- exception.MultipleSQLDriversInConfig,
- self.identity_api.domain_configs._load_config_from_file,
- self.resource_api,
- [unit.TESTCONF + '/domain_configs_one_extra_sql/' +
- 'keystone.domain2.conf'],
- 'domain2')
-
-
-class LdapFilterTests(identity_tests.FilterTests, unit.TestCase):
-
- def setUp(self):
- super(LdapFilterTests, self).setUp()
- sqldb = self.useFixture(database.Database())
- self.useFixture(ldapdb.LDAPDatabase())
-
- self.load_backends()
- self.load_fixtures(default_fixtures)
- sqldb.recreate()
- _assert_backends(self, identity='ldap')
-
- def config_overrides(self):
- super(LdapFilterTests, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
-
- def config_files(self):
- config_files = super(LdapFilterTests, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
- return config_files
-
- @wip('Not supported by LDAP identity driver')
- def test_list_users_in_group_inexact_filtered(self):
- # The LDAP identity driver currently does not support filtering on the
- # listing users for a given group, so will fail this test.
- super(LdapFilterTests,
- self).test_list_users_in_group_inexact_filtered()
-
- @wip('Not supported by LDAP identity driver')
- def test_list_users_in_group_exact_filtered(self):
- # The LDAP identity driver currently does not support filtering on the
- # listing users for a given group, so will fail this test.
- super(LdapFilterTests, self).test_list_users_in_group_exact_filtered()
diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py b/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
deleted file mode 100644
index ec789d04..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2012 OpenStack Foundation
-# Copyright 2013 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import ldappool
-import mock
-from oslo_config import cfg
-from oslotest import mockpatch
-
-from keystone.common.ldap import core as ldap_core
-from keystone.identity.backends import ldap
-from keystone.tests import unit
-from keystone.tests.unit import fakeldap
-from keystone.tests.unit import test_backend_ldap
-
-CONF = cfg.CONF
-
-
-class LdapPoolCommonTestMixin(object):
- """LDAP pool specific common tests used here and in live tests."""
-
- def cleanup_pools(self):
- ldap_core.PooledLDAPHandler.connection_pools.clear()
-
- def test_handler_with_use_pool_enabled(self):
- # by default use_pool and use_auth_pool is enabled in test pool config
- user_ref = self.identity_api.get_user(self.user_foo['id'])
- self.user_foo.pop('password')
- self.assertDictEqual(self.user_foo, user_ref)
-
- handler = ldap_core._get_connection(CONF.ldap.url, use_pool=True)
- self.assertIsInstance(handler, ldap_core.PooledLDAPHandler)
-
- @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'connect')
- @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'simple_bind_s')
- def test_handler_with_use_pool_not_enabled(self, bind_method,
- connect_method):
- self.config_fixture.config(group='ldap', use_pool=False)
- self.config_fixture.config(group='ldap', use_auth_pool=True)
- self.cleanup_pools()
-
- user_api = ldap.UserApi(CONF)
- handler = user_api.get_connection(user=None, password=None,
- end_user_auth=True)
- # use_auth_pool flag does not matter when use_pool is False
- # still handler is non pool version
- self.assertIsInstance(handler.conn, ldap_core.PythonLDAPHandler)
-
- @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'connect')
- @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'simple_bind_s')
- def test_handler_with_end_user_auth_use_pool_not_enabled(self, bind_method,
- connect_method):
- # by default use_pool is enabled in test pool config
- # now disabling use_auth_pool flag to test handler instance
- self.config_fixture.config(group='ldap', use_auth_pool=False)
- self.cleanup_pools()
-
- user_api = ldap.UserApi(CONF)
- handler = user_api.get_connection(user=None, password=None,
- end_user_auth=True)
- self.assertIsInstance(handler.conn, ldap_core.PythonLDAPHandler)
-
- # For end_user_auth case, flag should not be false otherwise
- # it will use, admin connections ldap pool
- handler = user_api.get_connection(user=None, password=None,
- end_user_auth=False)
- self.assertIsInstance(handler.conn, ldap_core.PooledLDAPHandler)
-
- def test_pool_size_set(self):
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
- self.assertEqual(CONF.ldap.pool_size, ldappool_cm.size)
-
- def test_pool_retry_max_set(self):
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
- self.assertEqual(CONF.ldap.pool_retry_max, ldappool_cm.retry_max)
-
- def test_pool_retry_delay_set(self):
- # just make one identity call to initiate ldap connection if not there
- self.identity_api.get_user(self.user_foo['id'])
-
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
- self.assertEqual(CONF.ldap.pool_retry_delay, ldappool_cm.retry_delay)
-
- def test_pool_use_tls_set(self):
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
- self.assertEqual(CONF.ldap.use_tls, ldappool_cm.use_tls)
-
- def test_pool_timeout_set(self):
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
- self.assertEqual(CONF.ldap.pool_connection_timeout,
- ldappool_cm.timeout)
-
- def test_pool_use_pool_set(self):
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
- self.assertEqual(CONF.ldap.use_pool, ldappool_cm.use_pool)
-
- def test_pool_connection_lifetime_set(self):
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
- self.assertEqual(CONF.ldap.pool_connection_lifetime,
- ldappool_cm.max_lifetime)
-
- def test_max_connection_error_raised(self):
-
- who = CONF.ldap.user
- cred = CONF.ldap.password
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
- ldappool_cm.size = 2
-
- # 3rd connection attempt should raise Max connection error
- with ldappool_cm.connection(who, cred) as _: # conn1
- with ldappool_cm.connection(who, cred) as _: # conn2
- try:
- with ldappool_cm.connection(who, cred) as _: # conn3
- _.unbind_s()
- self.fail()
- except Exception as ex:
- self.assertIsInstance(ex,
- ldappool.MaxConnectionReachedError)
- ldappool_cm.size = CONF.ldap.pool_size
-
- def test_pool_size_expands_correctly(self):
-
- who = CONF.ldap.user
- cred = CONF.ldap.password
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
- ldappool_cm.size = 3
-
- def _get_conn():
- return ldappool_cm.connection(who, cred)
-
- # Open 3 connections first
- with _get_conn() as _: # conn1
- self.assertEqual(1, len(ldappool_cm))
- with _get_conn() as _: # conn2
- self.assertEqual(2, len(ldappool_cm))
- with _get_conn() as _: # conn2
- _.unbind_ext_s()
- self.assertEqual(3, len(ldappool_cm))
-
- # Then open 3 connections again and make sure size does not grow
- # over 3
- with _get_conn() as _: # conn1
- self.assertEqual(1, len(ldappool_cm))
- with _get_conn() as _: # conn2
- self.assertEqual(2, len(ldappool_cm))
- with _get_conn() as _: # conn3
- _.unbind_ext_s()
- self.assertEqual(3, len(ldappool_cm))
-
- def test_password_change_with_pool(self):
- old_password = self.user_sna['password']
- self.cleanup_pools()
-
- # authenticate so that connection is added to pool before password
- # change
- user_ref = self.identity_api.authenticate(
- context={},
- user_id=self.user_sna['id'],
- password=self.user_sna['password'])
-
- self.user_sna.pop('password')
- self.user_sna['enabled'] = True
- self.assertDictEqual(self.user_sna, user_ref)
-
- new_password = 'new_password'
- user_ref['password'] = new_password
- self.identity_api.update_user(user_ref['id'], user_ref)
-
- # now authenticate again to make sure new password works with
- # connection pool
- user_ref2 = self.identity_api.authenticate(
- context={},
- user_id=self.user_sna['id'],
- password=new_password)
-
- user_ref.pop('password')
- self.assertDictEqual(user_ref, user_ref2)
-
- # Authentication with old password would not work here as there
- # is only one connection in pool which get bind again with updated
- # password..so no old bind is maintained in this case.
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=self.user_sna['id'],
- password=old_password)
-
-
-class LDAPIdentity(LdapPoolCommonTestMixin,
- test_backend_ldap.LDAPIdentity,
- unit.TestCase):
- """Executes tests in existing base class with pooled LDAP handler."""
-
- def setUp(self):
- self.useFixture(mockpatch.PatchObject(
- ldap_core.PooledLDAPHandler, 'Connector', fakeldap.FakeLdapPool))
- super(LDAPIdentity, self).setUp()
-
- self.addCleanup(self.cleanup_pools)
- # storing to local variable to avoid long references
- self.conn_pools = ldap_core.PooledLDAPHandler.connection_pools
- # super class loads db fixtures which establishes ldap connection
- # so adding dummy call to highlight connection pool initialization
- # as its not that obvious though its not needed here
- self.identity_api.get_user(self.user_foo['id'])
-
- def config_files(self):
- config_files = super(LDAPIdentity, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap_pool.conf'))
- return config_files
-
- @mock.patch.object(ldap_core, 'utf8_encode')
- def test_utf8_encoded_is_used_in_pool(self, mocked_method):
- def side_effect(arg):
- return arg
- mocked_method.side_effect = side_effect
- # invalidate the cache to get utf8_encode function called.
- self.identity_api.get_user.invalidate(self.identity_api,
- self.user_foo['id'])
- self.identity_api.get_user(self.user_foo['id'])
- mocked_method.assert_any_call(CONF.ldap.user)
- mocked_method.assert_any_call(CONF.ldap.password)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_rules.py b/keystone-moon/keystone/tests/unit/test_backend_rules.py
deleted file mode 100644
index c32c3307..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_rules.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit.policy import test_backends as policy_tests
-
-
-class RulesPolicy(unit.TestCase, policy_tests.PolicyTests):
- def setUp(self):
- super(RulesPolicy, self).setUp()
- self.load_backends()
-
- def config_overrides(self):
- super(RulesPolicy, self).config_overrides()
- self.config_fixture.config(group='policy', driver='rules')
-
- def test_create(self):
- self.assertRaises(exception.NotImplemented,
- super(RulesPolicy, self).test_create)
-
- def test_get(self):
- self.assertRaises(exception.NotImplemented,
- super(RulesPolicy, self).test_get)
-
- def test_list(self):
- self.assertRaises(exception.NotImplemented,
- super(RulesPolicy, self).test_list)
-
- def test_update(self):
- self.assertRaises(exception.NotImplemented,
- super(RulesPolicy, self).test_update)
-
- def test_delete(self):
- self.assertRaises(exception.NotImplemented,
- super(RulesPolicy, self).test_delete)
-
- def test_get_policy_returns_not_found(self):
- self.assertRaises(exception.NotImplemented,
- super(RulesPolicy,
- self).test_get_policy_returns_not_found)
-
- def test_update_policy_returns_not_found(self):
- self.assertRaises(exception.NotImplemented,
- super(RulesPolicy,
- self).test_update_policy_returns_not_found)
-
- def test_delete_policy_returns_not_found(self):
- self.assertRaises(exception.NotImplemented,
- super(RulesPolicy,
- self).test_delete_policy_returns_not_found)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_sql.py b/keystone-moon/keystone/tests/unit/test_backend_sql.py
deleted file mode 100644
index 2e703fff..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_sql.py
+++ /dev/null
@@ -1,1025 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import functools
-import uuid
-
-import mock
-from oslo_config import cfg
-from oslo_db import exception as db_exception
-from oslo_db import options
-from six.moves import range
-import sqlalchemy
-from sqlalchemy import exc
-from testtools import matchers
-
-from keystone.common import driver_hints
-from keystone.common import sql
-from keystone import exception
-from keystone.identity.backends import sql as identity_sql
-from keystone import resource
-from keystone.tests import unit
-from keystone.tests.unit.assignment import test_backends as assignment_tests
-from keystone.tests.unit.catalog import test_backends as catalog_tests
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.identity import test_backends as identity_tests
-from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit.policy import test_backends as policy_tests
-from keystone.tests.unit.resource import test_backends as resource_tests
-from keystone.tests.unit.token import test_backends as token_tests
-from keystone.tests.unit.trust import test_backends as trust_tests
-from keystone.token.persistence.backends import sql as token_sql
-
-
-CONF = cfg.CONF
-
-
-class SqlTests(unit.SQLDriverOverrides, unit.TestCase):
-
- def setUp(self):
- super(SqlTests, self).setUp()
- self.useFixture(database.Database(self.sql_driver_version_overrides))
- self.load_backends()
-
- # populate the engine with tables & fixtures
- self.load_fixtures(default_fixtures)
- # defaulted by the data load
- self.user_foo['enabled'] = True
-
- def config_files(self):
- config_files = super(SqlTests, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
-
-class SqlModels(SqlTests):
-
- def select_table(self, name):
- table = sqlalchemy.Table(name,
- sql.ModelBase.metadata,
- autoload=True)
- s = sqlalchemy.select([table])
- return s
-
- def assertExpectedSchema(self, table, expected_schema):
- """Assert that a table's schema is what we expect.
-
- :param string table: the name of the table to inspect
- :param tuple expected_schema: a tuple of tuples containing the
- expected schema
- :raises AssertionError: when the database schema doesn't match the
- expected schema
-
- The expected_schema format is simply::
-
- (
- ('column name', sql type, qualifying detail),
- ...
- )
-
- The qualifying detail varies based on the type of the column::
-
- - sql.Boolean columns must indicate the column's default value or
- None if there is no default
- - Columns with a length, like sql.String, must indicate the
- column's length
- - All other column types should use None
-
- Example::
-
- cols = (('id', sql.String, 64),
- ('enabled', sql.Boolean, True),
- ('extra', sql.JsonBlob, None))
- self.assertExpectedSchema('table_name', cols)
-
- """
- table = self.select_table(table)
-
- actual_schema = []
- for column in table.c:
- if isinstance(column.type, sql.Boolean):
- default = None
- if column._proxies[0].default:
- default = column._proxies[0].default.arg
- actual_schema.append((column.name, type(column.type), default))
- elif (hasattr(column.type, 'length') and
- not isinstance(column.type, sql.Enum)):
- # NOTE(dstanek): Even though sql.Enum columns have a length
- # set we don't want to catch them here. Maybe in the future
- # we'll check to see that they contain a list of the correct
- # possible values.
- actual_schema.append((column.name,
- type(column.type),
- column.type.length))
- else:
- actual_schema.append((column.name, type(column.type), None))
-
- self.assertItemsEqual(expected_schema, actual_schema)
-
- def test_user_model(self):
- cols = (('id', sql.String, 64),
- ('default_project_id', sql.String, 64),
- ('enabled', sql.Boolean, None),
- ('extra', sql.JsonBlob, None))
- self.assertExpectedSchema('user', cols)
-
- def test_local_user_model(self):
- cols = (('id', sql.Integer, None),
- ('user_id', sql.String, 64),
- ('name', sql.String, 255),
- ('domain_id', sql.String, 64))
- self.assertExpectedSchema('local_user', cols)
-
- def test_password_model(self):
- cols = (('id', sql.Integer, None),
- ('local_user_id', sql.Integer, None),
- ('password', sql.String, 128))
- self.assertExpectedSchema('password', cols)
-
- def test_federated_user_model(self):
- cols = (('id', sql.Integer, None),
- ('user_id', sql.String, 64),
- ('idp_id', sql.String, 64),
- ('protocol_id', sql.String, 64),
- ('unique_id', sql.String, 255),
- ('display_name', sql.String, 255))
- self.assertExpectedSchema('federated_user', cols)
-
- def test_group_model(self):
- cols = (('id', sql.String, 64),
- ('name', sql.String, 64),
- ('description', sql.Text, None),
- ('domain_id', sql.String, 64),
- ('extra', sql.JsonBlob, None))
- self.assertExpectedSchema('group', cols)
-
- def test_domain_model(self):
- cols = (('id', sql.String, 64),
- ('name', sql.String, 64),
- ('enabled', sql.Boolean, True),
- ('extra', sql.JsonBlob, None))
- self.assertExpectedSchema('domain', cols)
-
- def test_project_model(self):
- cols = (('id', sql.String, 64),
- ('name', sql.String, 64),
- ('description', sql.Text, None),
- ('domain_id', sql.String, 64),
- ('enabled', sql.Boolean, None),
- ('extra', sql.JsonBlob, None),
- ('parent_id', sql.String, 64),
- ('is_domain', sql.Boolean, False))
- self.assertExpectedSchema('project', cols)
-
- def test_role_assignment_model(self):
- cols = (('type', sql.Enum, None),
- ('actor_id', sql.String, 64),
- ('target_id', sql.String, 64),
- ('role_id', sql.String, 64),
- ('inherited', sql.Boolean, False))
- self.assertExpectedSchema('assignment', cols)
-
- def test_user_group_membership(self):
- cols = (('group_id', sql.String, 64),
- ('user_id', sql.String, 64))
- self.assertExpectedSchema('user_group_membership', cols)
-
- def test_revocation_event_model(self):
- cols = (('id', sql.Integer, None),
- ('domain_id', sql.String, 64),
- ('project_id', sql.String, 64),
- ('user_id', sql.String, 64),
- ('role_id', sql.String, 64),
- ('trust_id', sql.String, 64),
- ('consumer_id', sql.String, 64),
- ('access_token_id', sql.String, 64),
- ('issued_before', sql.DateTime, None),
- ('expires_at', sql.DateTime, None),
- ('revoked_at', sql.DateTime, None),
- ('audit_id', sql.String, 32),
- ('audit_chain_id', sql.String, 32))
- self.assertExpectedSchema('revocation_event', cols)
-
-
-class SqlIdentity(SqlTests, identity_tests.IdentityTests,
- assignment_tests.AssignmentTests,
- resource_tests.ResourceTests):
- def test_password_hashed(self):
- with sql.session_for_read() as session:
- user_ref = self.identity_api._get_user(session,
- self.user_foo['id'])
- self.assertNotEqual(self.user_foo['password'],
- user_ref['password'])
-
- def test_create_user_with_null_password(self):
- user_dict = unit.new_user_ref(
- domain_id=CONF.identity.default_domain_id)
- user_dict["password"] = None
- new_user_dict = self.identity_api.create_user(user_dict)
- with sql.session_for_read() as session:
- new_user_ref = self.identity_api._get_user(session,
- new_user_dict['id'])
- self.assertFalse(new_user_ref.local_user.passwords)
-
- def test_update_user_with_null_password(self):
- user_dict = unit.new_user_ref(
- domain_id=CONF.identity.default_domain_id)
- self.assertTrue(user_dict['password'])
- new_user_dict = self.identity_api.create_user(user_dict)
- new_user_dict["password"] = None
- new_user_dict = self.identity_api.update_user(new_user_dict['id'],
- new_user_dict)
- with sql.session_for_read() as session:
- new_user_ref = self.identity_api._get_user(session,
- new_user_dict['id'])
- self.assertFalse(new_user_ref.local_user.passwords)
-
- def test_delete_user_with_project_association(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.assignment_api.add_user_to_project(self.tenant_bar['id'],
- user['id'])
- self.identity_api.delete_user(user['id'])
- self.assertRaises(exception.UserNotFound,
- self.assignment_api.list_projects_for_user,
- user['id'])
-
- def test_create_null_user_name(self):
- user = unit.new_user_ref(name=None,
- domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.identity_api.create_user,
- user)
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user_by_name,
- user['name'],
- CONF.identity.default_domain_id)
-
- def test_create_user_case_sensitivity(self):
- # user name case sensitivity is down to the fact that it is marked as
- # an SQL UNIQUE column, which may not be valid for other backends, like
- # LDAP.
-
- # create a ref with a lowercase name
- ref = unit.new_user_ref(name=uuid.uuid4().hex.lower(),
- domain_id=CONF.identity.default_domain_id)
- ref = self.identity_api.create_user(ref)
-
- # assign a new ID with the same name, but this time in uppercase
- ref['name'] = ref['name'].upper()
- self.identity_api.create_user(ref)
-
- def test_create_federated_user_unique_constraint(self):
- federated_dict = unit.new_federated_user_ref()
- user_dict = self.shadow_users_api.create_federated_user(federated_dict)
- user_dict = self.identity_api.get_user(user_dict["id"])
- self.assertIsNotNone(user_dict["id"])
- self.assertRaises(exception.Conflict,
- self.shadow_users_api.create_federated_user,
- federated_dict)
-
- def test_get_federated_user(self):
- federated_dict = unit.new_federated_user_ref()
- user_dict_create = self.shadow_users_api.create_federated_user(
- federated_dict)
- user_dict_get = self.shadow_users_api.get_federated_user(
- federated_dict["idp_id"],
- federated_dict["protocol_id"],
- federated_dict["unique_id"])
- self.assertItemsEqual(user_dict_create, user_dict_get)
- self.assertEqual(user_dict_create["id"], user_dict_get["id"])
-
- def test_update_federated_user_display_name(self):
- federated_dict = unit.new_federated_user_ref()
- user_dict_create = self.shadow_users_api.create_federated_user(
- federated_dict)
- new_display_name = uuid.uuid4().hex
- self.shadow_users_api.update_federated_user_display_name(
- federated_dict["idp_id"],
- federated_dict["protocol_id"],
- federated_dict["unique_id"],
- new_display_name)
- user_ref = self.shadow_users_api._get_federated_user(
- federated_dict["idp_id"],
- federated_dict["protocol_id"],
- federated_dict["unique_id"])
- self.assertEqual(user_ref.federated_users[0].display_name,
- new_display_name)
- self.assertEqual(user_dict_create["id"], user_ref.id)
-
- def test_create_project_case_sensitivity(self):
- # project name case sensitivity is down to the fact that it is marked
- # as an SQL UNIQUE column, which may not be valid for other backends,
- # like LDAP.
-
- # create a ref with a lowercase name
- ref = unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(ref['id'], ref)
-
- # assign a new ID with the same name, but this time in uppercase
- ref['id'] = uuid.uuid4().hex
- ref['name'] = ref['name'].upper()
- self.resource_api.create_project(ref['id'], ref)
-
- def test_create_null_project_name(self):
- project = unit.new_project_ref(
- name=None, domain_id=CONF.identity.default_domain_id)
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'],
- project)
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project_by_name,
- project['name'],
- CONF.identity.default_domain_id)
-
- def test_delete_project_with_user_association(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- self.assignment_api.add_user_to_project(self.tenant_bar['id'],
- user['id'])
- self.resource_api.delete_project(self.tenant_bar['id'])
- tenants = self.assignment_api.list_projects_for_user(user['id'])
- self.assertEqual([], tenants)
-
- def test_update_project_returns_extra(self):
- """This tests for backwards-compatibility with an essex/folsom bug.
-
- Non-indexed attributes were returned in an 'extra' attribute, instead
- of on the entity itself; for consistency and backwards compatibility,
- those attributes should be included twice.
-
- This behavior is specific to the SQL driver.
-
- """
- arbitrary_key = uuid.uuid4().hex
- arbitrary_value = uuid.uuid4().hex
- project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- project[arbitrary_key] = arbitrary_value
- ref = self.resource_api.create_project(project['id'], project)
- self.assertEqual(arbitrary_value, ref[arbitrary_key])
- self.assertIsNone(ref.get('extra'))
-
- ref['name'] = uuid.uuid4().hex
- ref = self.resource_api.update_project(ref['id'], ref)
- self.assertEqual(arbitrary_value, ref[arbitrary_key])
- self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])
-
- def test_update_user_returns_extra(self):
- """This tests for backwards-compatibility with an essex/folsom bug.
-
- Non-indexed attributes were returned in an 'extra' attribute, instead
- of on the entity itself; for consistency and backwards compatibility,
- those attributes should be included twice.
-
- This behavior is specific to the SQL driver.
-
- """
- arbitrary_key = uuid.uuid4().hex
- arbitrary_value = uuid.uuid4().hex
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user[arbitrary_key] = arbitrary_value
- del user["id"]
- ref = self.identity_api.create_user(user)
- self.assertEqual(arbitrary_value, ref[arbitrary_key])
- self.assertIsNone(ref.get('password'))
- self.assertIsNone(ref.get('extra'))
-
- user['name'] = uuid.uuid4().hex
- user['password'] = uuid.uuid4().hex
- ref = self.identity_api.update_user(ref['id'], user)
- self.assertIsNone(ref.get('password'))
- self.assertIsNone(ref['extra'].get('password'))
- self.assertEqual(arbitrary_value, ref[arbitrary_key])
- self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])
-
- def test_sql_user_to_dict_null_default_project_id(self):
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user = self.identity_api.create_user(user)
- with sql.session_for_read() as session:
- query = session.query(identity_sql.User)
- query = query.filter_by(id=user['id'])
- raw_user_ref = query.one()
- self.assertIsNone(raw_user_ref.default_project_id)
- user_ref = raw_user_ref.to_dict()
- self.assertNotIn('default_project_id', user_ref)
- session.close()
-
- def test_list_domains_for_user(self):
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user = unit.new_user_ref(domain_id=domain['id'])
-
- test_domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(test_domain1['id'], test_domain1)
- test_domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(test_domain2['id'], test_domain2)
-
- user = self.identity_api.create_user(user)
- user_domains = self.assignment_api.list_domains_for_user(user['id'])
- self.assertEqual(0, len(user_domains))
- self.assignment_api.create_grant(user_id=user['id'],
- domain_id=test_domain1['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(user_id=user['id'],
- domain_id=test_domain2['id'],
- role_id=self.role_member['id'])
- user_domains = self.assignment_api.list_domains_for_user(user['id'])
- self.assertThat(user_domains, matchers.HasLength(2))
-
- def test_list_domains_for_user_with_grants(self):
- # Create two groups each with a role on a different domain, and
- # make user1 a member of both groups. Both these new domains
- # should now be included, along with any direct user grants.
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user = unit.new_user_ref(domain_id=domain['id'])
- user = self.identity_api.create_user(user)
- group1 = unit.new_group_ref(domain_id=domain['id'])
- group1 = self.identity_api.create_group(group1)
- group2 = unit.new_group_ref(domain_id=domain['id'])
- group2 = self.identity_api.create_group(group2)
-
- test_domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(test_domain1['id'], test_domain1)
- test_domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(test_domain2['id'], test_domain2)
- test_domain3 = unit.new_domain_ref()
- self.resource_api.create_domain(test_domain3['id'], test_domain3)
-
- self.identity_api.add_user_to_group(user['id'], group1['id'])
- self.identity_api.add_user_to_group(user['id'], group2['id'])
-
- # Create 3 grants, one user grant, the other two as group grants
- self.assignment_api.create_grant(user_id=user['id'],
- domain_id=test_domain1['id'],
- role_id=self.role_member['id'])
- self.assignment_api.create_grant(group_id=group1['id'],
- domain_id=test_domain2['id'],
- role_id=self.role_admin['id'])
- self.assignment_api.create_grant(group_id=group2['id'],
- domain_id=test_domain3['id'],
- role_id=self.role_admin['id'])
- user_domains = self.assignment_api.list_domains_for_user(user['id'])
- self.assertThat(user_domains, matchers.HasLength(3))
-
- def test_list_domains_for_user_with_inherited_grants(self):
- """Test that inherited roles on the domain are excluded.
-
- Test Plan:
-
- - Create two domains, one user, group and role
- - Domain1 is given an inherited user role, Domain2 an inherited
- group role (for a group of which the user is a member)
- - When listing domains for user, neither domain should be returned
-
- """
- domain1 = unit.new_domain_ref()
- domain1 = self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- domain2 = self.resource_api.create_domain(domain2['id'], domain2)
- user = unit.new_user_ref(domain_id=domain1['id'])
- user = self.identity_api.create_user(user)
- group = unit.new_group_ref(domain_id=domain1['id'])
- group = self.identity_api.create_group(group)
- self.identity_api.add_user_to_group(user['id'], group['id'])
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
-
- # Create a grant on each domain, one user grant, one group grant,
- # both inherited.
- self.assignment_api.create_grant(user_id=user['id'],
- domain_id=domain1['id'],
- role_id=role['id'],
- inherited_to_projects=True)
- self.assignment_api.create_grant(group_id=group['id'],
- domain_id=domain2['id'],
- role_id=role['id'],
- inherited_to_projects=True)
-
- user_domains = self.assignment_api.list_domains_for_user(user['id'])
- # No domains should be returned since both domains have only inherited
- # roles assignments.
- self.assertThat(user_domains, matchers.HasLength(0))
-
- def test_storing_null_domain_id_in_project_ref(self):
- """Test the special storage of domain_id=None in sql resource driver.
-
- The resource driver uses a special value in place of None for domain_id
- in the project record. This shouldn't escape the driver. Hence we test
- the interface to ensure that you can store a domain_id of None, and
- that any special value used inside the driver does not escape through
- the interface.
-
- """
- spoiler_project = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
- self.resource_api.create_project(spoiler_project['id'],
- spoiler_project)
-
- # First let's create a project with a None domain_id and make sure we
- # can read it back.
- project = unit.new_project_ref(domain_id=None, is_domain=True)
- project = self.resource_api.create_project(project['id'], project)
- ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project, ref)
-
- # Can we get it by name?
- ref = self.resource_api.get_project_by_name(project['name'], None)
- self.assertDictEqual(project, ref)
-
- # Can we filter for them - create a second domain to ensure we are
- # testing the receipt of more than one.
- project2 = unit.new_project_ref(domain_id=None, is_domain=True)
- project2 = self.resource_api.create_project(project2['id'], project2)
- hints = driver_hints.Hints()
- hints.add_filter('domain_id', None)
- refs = self.resource_api.list_projects(hints)
- self.assertThat(refs, matchers.HasLength(2 + self.domain_count))
- self.assertIn(project, refs)
- self.assertIn(project2, refs)
-
- # Can we update it?
- project['name'] = uuid.uuid4().hex
- self.resource_api.update_project(project['id'], project)
- ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project, ref)
-
- # Finally, make sure we can delete it
- project['enabled'] = False
- self.resource_api.update_project(project['id'], project)
- self.resource_api.delete_project(project['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- project['id'])
-
- def test_hidden_project_domain_root_is_really_hidden(self):
- """Ensure we cannot access the hidden root of all project domains.
-
- Calling any of the driver methods should result in the same as
- would be returned if we passed a project that does not exist. We don't
- test create_project, since we do not allow a caller of our API to
- specify their own ID for a new entity.
-
- """
- def _exercise_project_api(ref_id):
- driver = self.resource_api.driver
- self.assertRaises(exception.ProjectNotFound,
- driver.get_project,
- ref_id)
-
- self.assertRaises(exception.ProjectNotFound,
- driver.get_project_by_name,
- resource.NULL_DOMAIN_ID,
- ref_id)
-
- project_ids = [x['id'] for x in
- driver.list_projects(driver_hints.Hints())]
- self.assertNotIn(ref_id, project_ids)
-
- projects = driver.list_projects_from_ids([ref_id])
- self.assertThat(projects, matchers.HasLength(0))
-
- project_ids = [x for x in
- driver.list_project_ids_from_domain_ids([ref_id])]
- self.assertNotIn(ref_id, project_ids)
-
- self.assertRaises(exception.DomainNotFound,
- driver.list_projects_in_domain,
- ref_id)
-
- project_ids = [
- x['id'] for x in
- driver.list_projects_acting_as_domain(driver_hints.Hints())]
- self.assertNotIn(ref_id, project_ids)
-
- projects = driver.list_projects_in_subtree(ref_id)
- self.assertThat(projects, matchers.HasLength(0))
-
- self.assertRaises(exception.ProjectNotFound,
- driver.list_project_parents,
- ref_id)
-
- # A non-existing project just returns True from the driver
- self.assertTrue(driver.is_leaf_project(ref_id))
-
- self.assertRaises(exception.ProjectNotFound,
- driver.update_project,
- ref_id,
- {})
-
- self.assertRaises(exception.ProjectNotFound,
- driver.delete_project,
- ref_id)
-
- # Deleting list of projects that includes a non-existing project
- # should be silent
- driver.delete_projects_from_ids([ref_id])
-
- _exercise_project_api(uuid.uuid4().hex)
- _exercise_project_api(resource.NULL_DOMAIN_ID)
-
-
-class SqlTrust(SqlTests, trust_tests.TrustTests):
- pass
-
-
-class SqlToken(SqlTests, token_tests.TokenTests):
- def test_token_revocation_list_uses_right_columns(self):
- # This query used to be heavy with too many columns. We want
- # to make sure it is only running with the minimum columns
- # necessary.
-
- expected_query_args = (token_sql.TokenModel.id,
- token_sql.TokenModel.expires,
- token_sql.TokenModel.extra,)
-
- with mock.patch.object(token_sql, 'sql') as mock_sql:
- tok = token_sql.Token()
- tok.list_revoked_tokens()
-
- mock_query = mock_sql.session_for_read().__enter__().query
- mock_query.assert_called_with(*expected_query_args)
-
- def test_flush_expired_tokens_batch(self):
- # TODO(dstanek): This test should be rewritten to be less
- # brittle. The code will likely need to be changed first. I
- # just copied the spirit of the existing test when I rewrote
- # mox -> mock. These tests are brittle because they have the
- # call structure for SQLAlchemy encoded in them.
-
- # test sqlite dialect
- with mock.patch.object(token_sql, 'sql') as mock_sql:
- mock_sql.get_session().bind.dialect.name = 'sqlite'
- tok = token_sql.Token()
- tok.flush_expired_tokens()
-
- filter_mock = mock_sql.get_session().query().filter()
- self.assertFalse(filter_mock.limit.called)
- self.assertTrue(filter_mock.delete.called_once)
-
- def test_flush_expired_tokens_batch_mysql(self):
- # test mysql dialect, we don't need to test IBM DB SA separately, since
- # other tests below test the differences between how they use the batch
- # strategy
- with mock.patch.object(token_sql, 'sql') as mock_sql:
- mock_sql.session_for_write().__enter__(
- ).query().filter().delete.return_value = 0
-
- mock_sql.session_for_write().__enter__(
- ).bind.dialect.name = 'mysql'
-
- tok = token_sql.Token()
- expiry_mock = mock.Mock()
- ITERS = [1, 2, 3]
- expiry_mock.return_value = iter(ITERS)
- token_sql._expiry_range_batched = expiry_mock
- tok.flush_expired_tokens()
-
- # The expiry strategy is only invoked once, the other calls are via
- # the yield return.
- self.assertEqual(1, expiry_mock.call_count)
-
- mock_delete = mock_sql.session_for_write().__enter__(
- ).query().filter().delete
-
- self.assertThat(mock_delete.call_args_list,
- matchers.HasLength(len(ITERS)))
-
- def test_expiry_range_batched(self):
- upper_bound_mock = mock.Mock(side_effect=[1, "final value"])
- sess_mock = mock.Mock()
- query_mock = sess_mock.query().filter().order_by().offset().limit()
- query_mock.one.side_effect = [['test'], sql.NotFound()]
- for i, x in enumerate(token_sql._expiry_range_batched(sess_mock,
- upper_bound_mock,
- batch_size=50)):
- if i == 0:
- # The first time the batch iterator returns, it should return
- # the first result that comes back from the database.
- self.assertEqual('test', x)
- elif i == 1:
- # The second time, the database range function should return
- # nothing, so the batch iterator returns the result of the
- # upper_bound function
- self.assertEqual("final value", x)
- else:
- self.fail("range batch function returned more than twice")
-
- def test_expiry_range_strategy_sqlite(self):
- tok = token_sql.Token()
- sqlite_strategy = tok._expiry_range_strategy('sqlite')
- self.assertEqual(token_sql._expiry_range_all, sqlite_strategy)
-
- def test_expiry_range_strategy_ibm_db_sa(self):
- tok = token_sql.Token()
- db2_strategy = tok._expiry_range_strategy('ibm_db_sa')
- self.assertIsInstance(db2_strategy, functools.partial)
- self.assertEqual(token_sql._expiry_range_batched, db2_strategy.func)
- self.assertEqual({'batch_size': 100}, db2_strategy.keywords)
-
- def test_expiry_range_strategy_mysql(self):
- tok = token_sql.Token()
- mysql_strategy = tok._expiry_range_strategy('mysql')
- self.assertIsInstance(mysql_strategy, functools.partial)
- self.assertEqual(token_sql._expiry_range_batched, mysql_strategy.func)
- self.assertEqual({'batch_size': 1000}, mysql_strategy.keywords)
-
-
-class SqlCatalog(SqlTests, catalog_tests.CatalogTests):
-
- _legacy_endpoint_id_in_endpoint = True
- _enabled_default_to_true_when_creating_endpoint = True
-
- def test_catalog_ignored_malformed_urls(self):
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- malformed_url = "http://192.168.1.104:8774/v2/$(tenant)s"
- endpoint = unit.new_endpoint_ref(service_id=service['id'],
- url=malformed_url,
- region_id=None)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
-
- # NOTE(dstanek): there are no valid URLs, so nothing is in the catalog
- catalog = self.catalog_api.get_catalog('fake-user', 'fake-tenant')
- self.assertEqual({}, catalog)
-
- def test_get_catalog_with_empty_public_url(self):
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- endpoint = unit.new_endpoint_ref(url='', service_id=service['id'],
- region_id=None)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
-
- catalog = self.catalog_api.get_catalog('user', 'tenant')
- catalog_endpoint = catalog[endpoint['region_id']][service['type']]
- self.assertEqual(service['name'], catalog_endpoint['name'])
- self.assertEqual(endpoint['id'], catalog_endpoint['id'])
- self.assertEqual('', catalog_endpoint['publicURL'])
- self.assertIsNone(catalog_endpoint.get('adminURL'))
- self.assertIsNone(catalog_endpoint.get('internalURL'))
-
- def test_create_endpoint_region_returns_not_found(self):
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- endpoint = unit.new_endpoint_ref(region_id=uuid.uuid4().hex,
- service_id=service['id'])
-
- self.assertRaises(exception.ValidationError,
- self.catalog_api.create_endpoint,
- endpoint['id'],
- endpoint.copy())
-
- def test_create_region_invalid_id(self):
- region = unit.new_region_ref(id='0' * 256)
-
- self.assertRaises(exception.StringLengthExceeded,
- self.catalog_api.create_region,
- region)
-
- def test_create_region_invalid_parent_id(self):
- region = unit.new_region_ref(parent_region_id='0' * 256)
-
- self.assertRaises(exception.RegionNotFound,
- self.catalog_api.create_region,
- region)
-
- def test_delete_region_with_endpoint(self):
- # create a region
- region = unit.new_region_ref()
- self.catalog_api.create_region(region)
-
- # create a child region
- child_region = unit.new_region_ref(parent_region_id=region['id'])
- self.catalog_api.create_region(child_region)
- # create a service
- service = unit.new_service_ref()
- self.catalog_api.create_service(service['id'], service)
-
- # create an endpoint attached to the service and child region
- child_endpoint = unit.new_endpoint_ref(region_id=child_region['id'],
- service_id=service['id'])
-
- self.catalog_api.create_endpoint(child_endpoint['id'], child_endpoint)
- self.assertRaises(exception.RegionDeletionError,
- self.catalog_api.delete_region,
- child_region['id'])
-
- # create an endpoint attached to the service and parent region
- endpoint = unit.new_endpoint_ref(region_id=region['id'],
- service_id=service['id'])
-
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
- self.assertRaises(exception.RegionDeletionError,
- self.catalog_api.delete_region,
- region['id'])
-
-
-class SqlPolicy(SqlTests, policy_tests.PolicyTests):
- pass
-
-
-class SqlInheritance(SqlTests, assignment_tests.InheritanceTests):
- pass
-
-
-class SqlImpliedRoles(SqlTests, assignment_tests.ImpliedRoleTests):
- pass
-
-
-class SqlTokenCacheInvalidation(SqlTests, token_tests.TokenCacheInvalidation):
- def setUp(self):
- super(SqlTokenCacheInvalidation, self).setUp()
- self._create_test_data()
-
-
-class SqlFilterTests(SqlTests, identity_tests.FilterTests):
-
- def clean_up_entities(self):
- """Clean up entity test data from Filter Test Cases."""
- for entity in ['user', 'group', 'project']:
- self._delete_test_data(entity, self.entity_list[entity])
- self._delete_test_data(entity, self.domain1_entity_list[entity])
- del self.entity_list
- del self.domain1_entity_list
- self.domain1['enabled'] = False
- self.resource_api.update_domain(self.domain1['id'], self.domain1)
- self.resource_api.delete_domain(self.domain1['id'])
- del self.domain1
-
- def test_list_entities_filtered_by_domain(self):
- # NOTE(henry-nash): This method is here rather than in
- # unit.identity.test_backends since any domain filtering with LDAP is
- # handled by the manager layer (and is already tested elsewhere) not at
- # the driver level.
- self.addCleanup(self.clean_up_entities)
- self.domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(self.domain1['id'], self.domain1)
-
- self.entity_list = {}
- self.domain1_entity_list = {}
- for entity in ['user', 'group', 'project']:
- # Create 5 entities, 3 of which are in domain1
- DOMAIN1_ENTITIES = 3
- self.entity_list[entity] = self._create_test_data(entity, 2)
- self.domain1_entity_list[entity] = self._create_test_data(
- entity, DOMAIN1_ENTITIES, self.domain1['id'])
-
- # Should get back the DOMAIN1_ENTITIES in domain1
- hints = driver_hints.Hints()
- hints.add_filter('domain_id', self.domain1['id'])
- entities = self._list_entities(entity)(hints=hints)
- self.assertEqual(DOMAIN1_ENTITIES, len(entities))
- self._match_with_list(entities, self.domain1_entity_list[entity])
- # Check the driver has removed the filter from the list hints
- self.assertFalse(hints.get_exact_filter_by_name('domain_id'))
-
- def test_filter_sql_injection_attack(self):
- """Test against sql injection attack on filters
-
- Test Plan:
- - Attempt to get all entities back by passing a two-term attribute
- - Attempt to piggyback filter to damage DB (e.g. drop table)
-
- """
- # Check we have some users
- users = self.identity_api.list_users()
- self.assertTrue(len(users) > 0)
-
- hints = driver_hints.Hints()
- hints.add_filter('name', "anything' or 'x'='x")
- users = self.identity_api.list_users(hints=hints)
- self.assertEqual(0, len(users))
-
- # See if we can add a SQL command...use the group table instead of the
- # user table since 'user' is reserved word for SQLAlchemy.
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
- group = self.identity_api.create_group(group)
-
- hints = driver_hints.Hints()
- hints.add_filter('name', "x'; drop table group")
- groups = self.identity_api.list_groups(hints=hints)
- self.assertEqual(0, len(groups))
-
- groups = self.identity_api.list_groups()
- self.assertTrue(len(groups) > 0)
-
-
-class SqlLimitTests(SqlTests, identity_tests.LimitTests):
- def setUp(self):
- super(SqlLimitTests, self).setUp()
- identity_tests.LimitTests.setUp(self)
-
-
-class FakeTable(sql.ModelBase):
- __tablename__ = 'test_table'
- col = sql.Column(sql.String(32), primary_key=True)
-
- @sql.handle_conflicts('keystone')
- def insert(self):
- raise db_exception.DBDuplicateEntry
-
- @sql.handle_conflicts('keystone')
- def update(self):
- raise db_exception.DBError(
- inner_exception=exc.IntegrityError('a', 'a', 'a'))
-
- @sql.handle_conflicts('keystone')
- def lookup(self):
- raise KeyError
-
-
-class SqlDecorators(unit.TestCase):
-
- def test_initialization_fail(self):
- self.assertRaises(exception.StringLengthExceeded,
- FakeTable, col='a' * 64)
-
- def test_initialization(self):
- tt = FakeTable(col='a')
- self.assertEqual('a', tt.col)
-
- def test_conflict_happend(self):
- self.assertRaises(exception.Conflict, FakeTable().insert)
- self.assertRaises(exception.UnexpectedError, FakeTable().update)
-
- def test_not_conflict_error(self):
- self.assertRaises(KeyError, FakeTable().lookup)
-
-
-class SqlModuleInitialization(unit.TestCase):
-
- @mock.patch.object(sql.core, 'CONF')
- @mock.patch.object(options, 'set_defaults')
- def test_initialize_module(self, set_defaults, CONF):
- sql.initialize()
- set_defaults.assert_called_with(CONF,
- connection='sqlite:///keystone.db')
-
-
-class SqlCredential(SqlTests):
-
- def _create_credential_with_user_id(self, user_id=uuid.uuid4().hex):
- credential = unit.new_credential_ref(user_id=user_id,
- extra=uuid.uuid4().hex,
- type=uuid.uuid4().hex)
- self.credential_api.create_credential(credential['id'], credential)
- return credential
-
- def _validateCredentialList(self, retrieved_credentials,
- expected_credentials):
- self.assertEqual(len(expected_credentials), len(retrieved_credentials))
- retrived_ids = [c['id'] for c in retrieved_credentials]
- for cred in expected_credentials:
- self.assertIn(cred['id'], retrived_ids)
-
- def setUp(self):
- super(SqlCredential, self).setUp()
- self.credentials = []
- for _ in range(3):
- self.credentials.append(
- self._create_credential_with_user_id())
- self.user_credentials = []
- for _ in range(3):
- cred = self._create_credential_with_user_id(self.user_foo['id'])
- self.user_credentials.append(cred)
- self.credentials.append(cred)
-
- def test_list_credentials(self):
- credentials = self.credential_api.list_credentials()
- self._validateCredentialList(credentials, self.credentials)
- # test filtering using hints
- hints = driver_hints.Hints()
- hints.add_filter('user_id', self.user_foo['id'])
- credentials = self.credential_api.list_credentials(hints)
- self._validateCredentialList(credentials, self.user_credentials)
-
- def test_list_credentials_for_user(self):
- credentials = self.credential_api.list_credentials_for_user(
- self.user_foo['id'])
- self._validateCredentialList(credentials, self.user_credentials)
-
- def test_list_credentials_for_user_and_type(self):
- cred = self.user_credentials[0]
- credentials = self.credential_api.list_credentials_for_user(
- self.user_foo['id'], type=cred['type'])
- self._validateCredentialList(credentials, [cred])
diff --git a/keystone-moon/keystone/tests/unit/test_backend_templated.py b/keystone-moon/keystone/tests/unit/test_backend_templated.py
deleted file mode 100644
index ca957e78..00000000
--- a/keystone-moon/keystone/tests/unit/test_backend_templated.py
+++ /dev/null
@@ -1,261 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-import mock
-from six.moves import zip
-
-from keystone import catalog
-from keystone.tests import unit
-from keystone.tests.unit.catalog import test_backends as catalog_tests
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.ksfixtures import database
-
-
-BROKEN_WRITE_FUNCTIONALITY_MSG = ("Templated backend doesn't correctly "
- "implement write operations")
-
-
-class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests):
-
- DEFAULT_FIXTURE = {
- 'RegionOne': {
- 'compute': {
- 'adminURL': 'http://localhost:8774/v1.1/bar',
- 'publicURL': 'http://localhost:8774/v1.1/bar',
- 'internalURL': 'http://localhost:8774/v1.1/bar',
- 'name': "'Compute Service'",
- 'id': '2'
- },
- 'identity': {
- 'adminURL': 'http://localhost:35357/v2.0',
- 'publicURL': 'http://localhost:5000/v2.0',
- 'internalURL': 'http://localhost:35357/v2.0',
- 'name': "'Identity Service'",
- 'id': '1'
- }
- }
- }
-
- def setUp(self):
- super(TestTemplatedCatalog, self).setUp()
- self.useFixture(database.Database())
- self.load_backends()
- self.load_fixtures(default_fixtures)
-
- def config_overrides(self):
- super(TestTemplatedCatalog, self).config_overrides()
- self.config_fixture.config(
- group='catalog',
- driver='templated',
- template_file=unit.dirs.tests('default_catalog.templates'))
-
- def test_get_catalog(self):
- catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
- self.assertDictEqual(self.DEFAULT_FIXTURE, catalog_ref)
-
- # NOTE(lbragstad): This test is skipped because the catalog is being
- # modified within the test and not through the API.
- @unit.skip_if_cache_is_enabled('catalog')
- def test_catalog_ignored_malformed_urls(self):
- # both endpoints are in the catalog
- catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
- self.assertEqual(2, len(catalog_ref['RegionOne']))
-
- region = self.catalog_api.driver.templates['RegionOne']
- region['compute']['adminURL'] = 'http://localhost:8774/v1.1/$(tenant)s'
-
- # the malformed one has been removed
- catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
- self.assertEqual(1, len(catalog_ref['RegionOne']))
-
- def test_get_catalog_endpoint_disabled(self):
- self.skipTest("Templated backend doesn't have disabled endpoints")
-
- def test_get_v3_catalog_endpoint_disabled(self):
- self.skipTest("Templated backend doesn't have disabled endpoints")
-
- def assert_catalogs_equal(self, expected, observed):
- sort_key = lambda d: d['id']
- for e, o in zip(sorted(expected, key=sort_key),
- sorted(observed, key=sort_key)):
- expected_endpoints = e.pop('endpoints')
- observed_endpoints = o.pop('endpoints')
- self.assertDictEqual(e, o)
- self.assertItemsEqual(expected_endpoints, observed_endpoints)
-
- def test_get_v3_catalog(self):
- user_id = uuid.uuid4().hex
- project_id = uuid.uuid4().hex
- catalog_ref = self.catalog_api.get_v3_catalog(user_id, project_id)
- exp_catalog = [
- {'endpoints': [
- {'interface': 'admin',
- 'region': 'RegionOne',
- 'url': 'http://localhost:8774/v1.1/%s' % project_id},
- {'interface': 'public',
- 'region': 'RegionOne',
- 'url': 'http://localhost:8774/v1.1/%s' % project_id},
- {'interface': 'internal',
- 'region': 'RegionOne',
- 'url': 'http://localhost:8774/v1.1/%s' % project_id}],
- 'type': 'compute',
- 'name': "'Compute Service'",
- 'id': '2'},
- {'endpoints': [
- {'interface': 'admin',
- 'region': 'RegionOne',
- 'url': 'http://localhost:35357/v2.0'},
- {'interface': 'public',
- 'region': 'RegionOne',
- 'url': 'http://localhost:5000/v2.0'},
- {'interface': 'internal',
- 'region': 'RegionOne',
- 'url': 'http://localhost:35357/v2.0'}],
- 'type': 'identity',
- 'name': "'Identity Service'",
- 'id': '1'}]
- self.assert_catalogs_equal(exp_catalog, catalog_ref)
-
- def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
- user_id = uuid.uuid4().hex
- tenant_id = None
- # If the URL has no 'tenant_id' to substitute, we will skip the
- # endpoint which contains this kind of URL.
- catalog_ref = self.catalog_api.get_v3_catalog(user_id, tenant_id)
- exp_catalog = [
- {'endpoints': [],
- 'type': 'compute',
- 'name': "'Compute Service'",
- 'id': '2'},
- {'endpoints': [
- {'interface': 'admin',
- 'region': 'RegionOne',
- 'url': 'http://localhost:35357/v2.0'},
- {'interface': 'public',
- 'region': 'RegionOne',
- 'url': 'http://localhost:5000/v2.0'},
- {'interface': 'internal',
- 'region': 'RegionOne',
- 'url': 'http://localhost:35357/v2.0'}],
- 'type': 'identity',
- 'name': "'Identity Service'",
- 'id': '1'}]
- self.assert_catalogs_equal(exp_catalog, catalog_ref)
-
- def test_list_regions_filtered_by_parent_region_id(self):
- self.skipTest('Templated backend does not support hints')
-
- def test_service_filtering(self):
- self.skipTest("Templated backend doesn't support filtering")
-
- def test_list_services_with_hints(self):
- hints = {}
- services = self.catalog_api.list_services(hints=hints)
- exp_services = [
- {'type': 'compute',
- 'description': '',
- 'enabled': True,
- 'name': "'Compute Service'",
- 'id': 'compute'},
- {'type': 'identity',
- 'description': '',
- 'enabled': True,
- 'name': "'Identity Service'",
- 'id': 'identity'}]
- self.assertItemsEqual(exp_services, services)
-
- # NOTE(dstanek): the following methods have been overridden
- # from unit.catalog.test_backends.CatalogTests.
-
- def test_region_crud(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_cache_layer_region_crud(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_invalidate_cache_when_updating_region(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_create_region_with_duplicate_id(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_delete_region_returns_not_found(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_create_region_invalid_parent_region_returns_not_found(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_avoid_creating_circular_references_in_regions_update(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- @mock.patch.object(catalog.Driver,
- "_ensure_no_circle_in_hierarchical_regions")
- def test_circular_regions_can_be_deleted(self, mock_ensure_on_circle):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_service_crud(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_cache_layer_service_crud(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- @unit.skip_if_cache_disabled('catalog')
- def test_invalidate_cache_when_updating_service(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_delete_service_with_endpoint(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_cache_layer_delete_service_with_endpoint(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_delete_service_returns_not_found(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_update_endpoint_nonexistent_service(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_create_endpoint_nonexistent_region(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_update_endpoint_nonexistent_region(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_get_endpoint_returns_not_found(self):
- self.skipTest("Templated backend doesn't use IDs for endpoints.")
-
- def test_delete_endpoint_returns_not_found(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_create_endpoint(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_update_endpoint(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
-
- def test_list_endpoints(self):
- expected_urls = set(['http://localhost:$(public_port)s/v2.0',
- 'http://localhost:$(admin_port)s/v2.0',
- 'http://localhost:8774/v1.1/$(tenant_id)s'])
- endpoints = self.catalog_api.list_endpoints()
- self.assertEqual(expected_urls, set(e['url'] for e in endpoints))
-
- @unit.skip_if_cache_disabled('catalog')
- def test_invalidate_cache_when_updating_endpoint(self):
- self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
diff --git a/keystone-moon/keystone/tests/unit/test_cache.py b/keystone-moon/keystone/tests/unit/test_cache.py
deleted file mode 100644
index 3c2afe66..00000000
--- a/keystone-moon/keystone/tests/unit/test_cache.py
+++ /dev/null
@@ -1,324 +0,0 @@
-# Copyright 2013 Metacloud
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import time
-import uuid
-
-from dogpile.cache import api
-from dogpile.cache import proxy
-import mock
-from oslo_config import cfg
-
-from keystone.common import cache
-from keystone import exception
-from keystone.tests import unit
-
-
-CONF = cfg.CONF
-NO_VALUE = api.NO_VALUE
-
-
-def _copy_value(value):
- if value is not NO_VALUE:
- value = copy.deepcopy(value)
- return value
-
-
-# NOTE(morganfainberg): WARNING - It is not recommended to use the Memory
-# backend for dogpile.cache in a real deployment under any circumstances. The
-# backend does no cleanup of expired values and therefore will leak memory. The
-# backend is not implemented in a way to share data across processes (e.g.
-# Keystone in HTTPD. This proxy is a hack to get around the lack of isolation
-# of values in memory. Currently it blindly stores and retrieves the values
-# from the cache, and modifications to dicts/lists/etc returned can result in
-# changes to the cached values. In short, do not use the dogpile.cache.memory
-# backend unless you are running tests or expecting odd/strange results.
-class CacheIsolatingProxy(proxy.ProxyBackend):
- """Proxy that forces a memory copy of stored values.
-
- The default in-memory cache-region does not perform a copy on values it is
- meant to cache. Therefore if the value is modified after set or after get,
- the cached value also is modified. This proxy does a copy as the last
- thing before storing data.
-
- """
- def get(self, key):
- return _copy_value(self.proxied.get(key))
-
- def set(self, key, value):
- self.proxied.set(key, _copy_value(value))
-
-
-class TestProxy(proxy.ProxyBackend):
- def get(self, key):
- value = _copy_value(self.proxied.get(key))
- if value is not NO_VALUE:
- if isinstance(value[0], TestProxyValue):
- value[0].cached = True
- return value
-
-
-class TestProxyValue(object):
- def __init__(self, value):
- self.value = value
- self.cached = False
-
-
-class CacheRegionTest(unit.TestCase):
-
- def setUp(self):
- super(CacheRegionTest, self).setUp()
- self.region = cache.make_region()
- cache.configure_cache_region(self.region)
- self.region.wrap(TestProxy)
- self.test_value = TestProxyValue('Decorator Test')
-
- def _add_test_caching_option(self):
- self.config_fixture.register_opt(
- cfg.BoolOpt('caching', default=True), group='cache')
-
- def _get_cacheable_function(self):
- with mock.patch.object(cache.REGION, 'cache_on_arguments',
- self.region.cache_on_arguments):
- memoize = cache.get_memoization_decorator(section='cache')
-
- @memoize
- def cacheable_function(value):
- return value
-
- return cacheable_function
-
- def test_region_built_with_proxy_direct_cache_test(self):
- # Verify cache regions are properly built with proxies.
- test_value = TestProxyValue('Direct Cache Test')
- self.region.set('cache_test', test_value)
- cached_value = self.region.get('cache_test')
- self.assertTrue(cached_value.cached)
-
- def test_cache_region_no_error_multiple_config(self):
- # Verify configuring the CacheRegion again doesn't error.
- cache.configure_cache_region(self.region)
- cache.configure_cache_region(self.region)
-
- def _get_cache_fallthrough_fn(self, cache_time):
- with mock.patch.object(cache.REGION, 'cache_on_arguments',
- self.region.cache_on_arguments):
- memoize = cache.get_memoization_decorator(
- section='cache',
- expiration_section='assignment')
-
- class _test_obj(object):
- def __init__(self, value):
- self.test_value = value
-
- @memoize
- def get_test_value(self):
- return self.test_value
-
- def _do_test(value):
-
- test_obj = _test_obj(value)
-
- # Ensure the value has been cached
- test_obj.get_test_value()
- # Get the now cached value
- cached_value = test_obj.get_test_value()
- self.assertTrue(cached_value.cached)
- self.assertEqual(value.value, cached_value.value)
- self.assertEqual(cached_value.value, test_obj.test_value.value)
- # Change the underlying value on the test object.
- test_obj.test_value = TestProxyValue(uuid.uuid4().hex)
- self.assertEqual(cached_value.value,
- test_obj.get_test_value().value)
- # override the system time to ensure the non-cached new value
- # is returned
- new_time = time.time() + (cache_time * 2)
- with mock.patch.object(time, 'time',
- return_value=new_time):
- overriden_cache_value = test_obj.get_test_value()
- self.assertNotEqual(cached_value.value,
- overriden_cache_value.value)
- self.assertEqual(test_obj.test_value.value,
- overriden_cache_value.value)
-
- return _do_test
-
- def test_cache_no_fallthrough_expiration_time_fn(self):
- # Since we do not re-configure the cache region, for ease of testing
- # this value is set the same as the expiration_time default in the
- # [cache] section
- cache_time = 600
- expiration_time = cache.get_expiration_time_fn('role')
- do_test = self._get_cache_fallthrough_fn(cache_time)
- # Run the test with the assignment cache_time value
- self.config_fixture.config(cache_time=cache_time,
- group='role')
- test_value = TestProxyValue(uuid.uuid4().hex)
- self.assertEqual(cache_time, expiration_time())
- do_test(value=test_value)
-
- def test_cache_fallthrough_expiration_time_fn(self):
- # Since we do not re-configure the cache region, for ease of testing
- # this value is set the same as the expiration_time default in the
- # [cache] section
- cache_time = 599
- expiration_time = cache.get_expiration_time_fn('role')
- do_test = self._get_cache_fallthrough_fn(cache_time)
- # Run the test with the assignment cache_time value set to None and
- # the global value set.
- self.config_fixture.config(cache_time=None, group='role')
- test_value = TestProxyValue(uuid.uuid4().hex)
- self.assertIsNone(expiration_time())
- do_test(value=test_value)
-
- def test_should_cache_fn_global_cache_enabled(self):
- # Verify should_cache_fn generates a sane function for subsystem and
- # functions as expected with caching globally enabled.
- cacheable_function = self._get_cacheable_function()
-
- self.config_fixture.config(group='cache', enabled=True)
- cacheable_function(self.test_value)
- cached_value = cacheable_function(self.test_value)
- self.assertTrue(cached_value.cached)
-
- def test_should_cache_fn_global_cache_disabled(self):
- # Verify should_cache_fn generates a sane function for subsystem and
- # functions as expected with caching globally disabled.
- cacheable_function = self._get_cacheable_function()
-
- self.config_fixture.config(group='cache', enabled=False)
- cacheable_function(self.test_value)
- cached_value = cacheable_function(self.test_value)
- self.assertFalse(cached_value.cached)
-
- def test_should_cache_fn_global_cache_disabled_section_cache_enabled(self):
- # Verify should_cache_fn generates a sane function for subsystem and
- # functions as expected with caching globally disabled and the specific
- # section caching enabled.
- cacheable_function = self._get_cacheable_function()
-
- self._add_test_caching_option()
- self.config_fixture.config(group='cache', enabled=False)
- self.config_fixture.config(group='cache', caching=True)
-
- cacheable_function(self.test_value)
- cached_value = cacheable_function(self.test_value)
- self.assertFalse(cached_value.cached)
-
- def test_should_cache_fn_global_cache_enabled_section_cache_disabled(self):
- # Verify should_cache_fn generates a sane function for subsystem and
- # functions as expected with caching globally enabled and the specific
- # section caching disabled.
- cacheable_function = self._get_cacheable_function()
-
- self._add_test_caching_option()
- self.config_fixture.config(group='cache', enabled=True)
- self.config_fixture.config(group='cache', caching=False)
-
- cacheable_function(self.test_value)
- cached_value = cacheable_function(self.test_value)
- self.assertFalse(cached_value.cached)
-
- def test_should_cache_fn_global_cache_enabled_section_cache_enabled(self):
- # Verify should_cache_fn generates a sane function for subsystem and
- # functions as expected with caching globally enabled and the specific
- # section caching enabled.
- cacheable_function = self._get_cacheable_function()
-
- self._add_test_caching_option()
- self.config_fixture.config(group='cache', enabled=True)
- self.config_fixture.config(group='cache', caching=True)
-
- cacheable_function(self.test_value)
- cached_value = cacheable_function(self.test_value)
- self.assertTrue(cached_value.cached)
-
- def test_cache_dictionary_config_builder(self):
- """Validate we build a sane dogpile.cache dictionary config."""
- self.config_fixture.config(group='cache',
- config_prefix='test_prefix',
- backend='some_test_backend',
- expiration_time=86400,
- backend_argument=['arg1:test',
- 'arg2:test:test',
- 'arg3.invalid'])
-
- config_dict = cache.build_cache_config()
- self.assertEqual(
- CONF.cache.backend, config_dict['test_prefix.backend'])
- self.assertEqual(
- CONF.cache.expiration_time,
- config_dict['test_prefix.expiration_time'])
- self.assertEqual('test', config_dict['test_prefix.arguments.arg1'])
- self.assertEqual('test:test',
- config_dict['test_prefix.arguments.arg2'])
- self.assertNotIn('test_prefix.arguments.arg3', config_dict)
-
- def test_cache_debug_proxy(self):
- single_value = 'Test Value'
- single_key = 'testkey'
- multi_values = {'key1': 1, 'key2': 2, 'key3': 3}
-
- self.region.set(single_key, single_value)
- self.assertEqual(single_value, self.region.get(single_key))
-
- self.region.delete(single_key)
- self.assertEqual(NO_VALUE, self.region.get(single_key))
-
- self.region.set_multi(multi_values)
- cached_values = self.region.get_multi(multi_values.keys())
- for value in multi_values.values():
- self.assertIn(value, cached_values)
- self.assertEqual(len(multi_values.values()), len(cached_values))
-
- self.region.delete_multi(multi_values.keys())
- for value in self.region.get_multi(multi_values.keys()):
- self.assertEqual(NO_VALUE, value)
-
- def test_configure_non_region_object_raises_error(self):
- self.assertRaises(exception.ValidationError,
- cache.configure_cache_region,
- "bogus")
-
-
-class CacheNoopBackendTest(unit.TestCase):
-
- def setUp(self):
- super(CacheNoopBackendTest, self).setUp()
- self.region = cache.make_region()
- cache.configure_cache_region(self.region)
-
- def config_overrides(self):
- super(CacheNoopBackendTest, self).config_overrides()
- self.config_fixture.config(group='cache',
- backend='keystone.common.cache.noop')
-
- def test_noop_backend(self):
- single_value = 'Test Value'
- single_key = 'testkey'
- multi_values = {'key1': 1, 'key2': 2, 'key3': 3}
-
- self.region.set(single_key, single_value)
- self.assertEqual(NO_VALUE, self.region.get(single_key))
-
- self.region.set_multi(multi_values)
- cached_values = self.region.get_multi(multi_values.keys())
- self.assertEqual(len(cached_values), len(multi_values.values()))
- for value in cached_values:
- self.assertEqual(NO_VALUE, value)
-
- # Delete should not raise exceptions
- self.region.delete(single_key)
- self.region.delete_multi(multi_values.keys())
diff --git a/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py b/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py
deleted file mode 100644
index 66f80c21..00000000
--- a/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py
+++ /dev/null
@@ -1,728 +0,0 @@
-# Copyright 2014 Hewlett-Packard Development Company, L.P.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import collections
-import copy
-import functools
-import uuid
-
-from dogpile.cache import api
-from dogpile.cache import region as dp_region
-import six
-from six.moves import range
-
-from keystone.common.cache.backends import mongo
-from keystone import exception
-from keystone.tests import unit
-
-
-# Mock database structure sample where 'ks_cache' is database and
-# 'cache' is collection. Dogpile CachedValue data is divided in two
-# fields `value` (CachedValue.payload) and `meta` (CachedValue.metadata)
-ks_cache = {
- "cache": [
- {
- "value": {
- "serviceType": "identity",
- "allVersionsUrl": "https://dummyUrl",
- "dateLastModified": "ISODDate(2014-02-08T18:39:13.237Z)",
- "serviceName": "Identity",
- "enabled": "True"
- },
- "meta": {
- "v": 1,
- "ct": 1392371422.015121
- },
- "doc_date": "ISODate('2014-02-14T09:50:22.015Z')",
- "_id": "8251dc95f63842719c077072f1047ddf"
- },
- {
- "value": "dummyValueX",
- "meta": {
- "v": 1,
- "ct": 1392371422.014058
- },
- "doc_date": "ISODate('2014-02-14T09:50:22.014Z')",
- "_id": "66730b9534d146f0804d23729ad35436"
- }
- ]
-}
-
-
-COLLECTIONS = {}
-SON_MANIPULATOR = None
-
-
-class MockCursor(object):
-
- def __init__(self, collection, dataset_factory):
- super(MockCursor, self).__init__()
- self.collection = collection
- self._factory = dataset_factory
- self._dataset = self._factory()
- self._limit = None
- self._skip = None
-
- def __iter__(self):
- return self
-
- def __next__(self):
- if self._skip:
- for _ in range(self._skip):
- next(self._dataset)
- self._skip = None
- if self._limit is not None and self._limit <= 0:
- raise StopIteration()
- if self._limit is not None:
- self._limit -= 1
- return next(self._dataset)
-
- next = __next__
-
- def __getitem__(self, index):
- arr = [x for x in self._dataset]
- self._dataset = iter(arr)
- return arr[index]
-
-
-class MockCollection(object):
-
- def __init__(self, db, name):
- super(MockCollection, self).__init__()
- self.name = name
- self._collection_database = db
- self._documents = {}
- self.write_concern = {}
-
- def __getattr__(self, name):
- if name == 'database':
- return self._collection_database
-
- def ensure_index(self, key_or_list, *args, **kwargs):
- pass
-
- def index_information(self):
- return {}
-
- def find_one(self, spec_or_id=None, *args, **kwargs):
- if spec_or_id is None:
- spec_or_id = {}
- if not isinstance(spec_or_id, collections.Mapping):
- spec_or_id = {'_id': spec_or_id}
-
- try:
- return next(self.find(spec_or_id, *args, **kwargs))
- except StopIteration:
- return None
-
- def find(self, spec=None, *args, **kwargs):
- return MockCursor(self, functools.partial(self._get_dataset, spec))
-
- def _get_dataset(self, spec):
- dataset = (self._copy_doc(document, dict) for document in
- self._iter_documents(spec))
- return dataset
-
- def _iter_documents(self, spec=None):
- return (SON_MANIPULATOR.transform_outgoing(document, self) for
- document in six.itervalues(self._documents)
- if self._apply_filter(document, spec))
-
- def _apply_filter(self, document, query):
- for key, search in query.items():
- doc_val = document.get(key)
- if isinstance(search, dict):
- op_dict = {'$in': lambda dv, sv: dv in sv}
- is_match = all(
- op_str in op_dict and op_dict[op_str](doc_val, search_val)
- for op_str, search_val in search.items()
- )
- else:
- is_match = doc_val == search
-
- return is_match
-
- def _copy_doc(self, obj, container):
- if isinstance(obj, list):
- new = []
- for item in obj:
- new.append(self._copy_doc(item, container))
- return new
- if isinstance(obj, dict):
- new = container()
- for key, value in list(obj.items()):
- new[key] = self._copy_doc(value, container)
- return new
- else:
- return copy.copy(obj)
-
- def insert(self, data, manipulate=True, **kwargs):
- if isinstance(data, list):
- return [self._insert(element) for element in data]
- return self._insert(data)
-
- def save(self, data, manipulate=True, **kwargs):
- return self._insert(data)
-
- def _insert(self, data):
- if '_id' not in data:
- data['_id'] = uuid.uuid4().hex
- object_id = data['_id']
- self._documents[object_id] = self._internalize_dict(data)
- return object_id
-
- def find_and_modify(self, spec, document, upsert=False, **kwargs):
- self.update(spec, document, upsert, **kwargs)
-
- def update(self, spec, document, upsert=False, **kwargs):
-
- existing_docs = [doc for doc in six.itervalues(self._documents)
- if self._apply_filter(doc, spec)]
- if existing_docs:
- existing_doc = existing_docs[0] # should find only 1 match
- _id = existing_doc['_id']
- existing_doc.clear()
- existing_doc['_id'] = _id
- existing_doc.update(self._internalize_dict(document))
- elif upsert:
- existing_doc = self._documents[self._insert(document)]
-
- def _internalize_dict(self, d):
- return {k: copy.deepcopy(v) for k, v in d.items()}
-
- def remove(self, spec_or_id=None, search_filter=None):
- """Remove objects matching spec_or_id from the collection."""
- if spec_or_id is None:
- spec_or_id = search_filter if search_filter else {}
- if not isinstance(spec_or_id, dict):
- spec_or_id = {'_id': spec_or_id}
- to_delete = list(self.find(spec=spec_or_id))
- for doc in to_delete:
- doc_id = doc['_id']
- del self._documents[doc_id]
-
- return {
- "connectionId": uuid.uuid4().hex,
- "n": len(to_delete),
- "ok": 1.0,
- "err": None,
- }
-
-
-class MockMongoDB(object):
- def __init__(self, dbname):
- self._dbname = dbname
- self.mainpulator = None
-
- def authenticate(self, username, password):
- pass
-
- def add_son_manipulator(self, manipulator):
- global SON_MANIPULATOR
- SON_MANIPULATOR = manipulator
-
- def __getattr__(self, name):
- if name == 'authenticate':
- return self.authenticate
- elif name == 'name':
- return self._dbname
- elif name == 'add_son_manipulator':
- return self.add_son_manipulator
- else:
- return get_collection(self._dbname, name)
-
- def __getitem__(self, name):
- return get_collection(self._dbname, name)
-
-
-class MockMongoClient(object):
- def __init__(self, *args, **kwargs):
- pass
-
- def __getattr__(self, dbname):
- return MockMongoDB(dbname)
-
-
-def get_collection(db_name, collection_name):
- mongo_collection = MockCollection(MockMongoDB(db_name), collection_name)
- return mongo_collection
-
-
-def pymongo_override():
- global pymongo
- import pymongo
- if pymongo.MongoClient is not MockMongoClient:
- pymongo.MongoClient = MockMongoClient
- if pymongo.MongoReplicaSetClient is not MockMongoClient:
- pymongo.MongoClient = MockMongoClient
-
-
-class MyTransformer(mongo.BaseTransform):
- """Added here just to check manipulator logic is used correctly."""
-
- def transform_incoming(self, son, collection):
- return super(MyTransformer, self).transform_incoming(son, collection)
-
- def transform_outgoing(self, son, collection):
- return super(MyTransformer, self).transform_outgoing(son, collection)
-
-
-class MongoCache(unit.BaseTestCase):
- def setUp(self):
- super(MongoCache, self).setUp()
- global COLLECTIONS
- COLLECTIONS = {}
- mongo.MongoApi._DB = {}
- mongo.MongoApi._MONGO_COLLS = {}
- pymongo_override()
- # using typical configuration
- self.arguments = {
- 'db_hosts': 'localhost:27017',
- 'db_name': 'ks_cache',
- 'cache_collection': 'cache',
- 'username': 'test_user',
- 'password': 'test_password'
- }
-
- def test_missing_db_hosts(self):
- self.arguments.pop('db_hosts')
- region = dp_region.make_region()
- self.assertRaises(exception.ValidationError, region.configure,
- 'keystone.cache.mongo',
- arguments=self.arguments)
-
- def test_missing_db_name(self):
- self.arguments.pop('db_name')
- region = dp_region.make_region()
- self.assertRaises(exception.ValidationError, region.configure,
- 'keystone.cache.mongo',
- arguments=self.arguments)
-
- def test_missing_cache_collection_name(self):
- self.arguments.pop('cache_collection')
- region = dp_region.make_region()
- self.assertRaises(exception.ValidationError, region.configure,
- 'keystone.cache.mongo',
- arguments=self.arguments)
-
- def test_incorrect_write_concern(self):
- self.arguments['w'] = 'one value'
- region = dp_region.make_region()
- self.assertRaises(exception.ValidationError, region.configure,
- 'keystone.cache.mongo',
- arguments=self.arguments)
-
- def test_correct_write_concern(self):
- self.arguments['w'] = 1
- region = dp_region.make_region().configure('keystone.cache.mongo',
- arguments=self.arguments)
-
- random_key = uuid.uuid4().hex
- region.set(random_key, "dummyValue10")
- # There is no proxy so can access MongoCacheBackend directly
- self.assertEqual(1, region.backend.api.w)
-
- def test_incorrect_read_preference(self):
- self.arguments['read_preference'] = 'inValidValue'
- region = dp_region.make_region().configure('keystone.cache.mongo',
- arguments=self.arguments)
- # As per delayed loading of pymongo, read_preference value should
- # still be string and NOT enum
- self.assertEqual('inValidValue', region.backend.api.read_preference)
-
- random_key = uuid.uuid4().hex
- self.assertRaises(ValueError, region.set,
- random_key, "dummyValue10")
-
- def test_correct_read_preference(self):
- self.arguments['read_preference'] = 'secondaryPreferred'
- region = dp_region.make_region().configure('keystone.cache.mongo',
- arguments=self.arguments)
- # As per delayed loading of pymongo, read_preference value should
- # still be string and NOT enum
- self.assertEqual('secondaryPreferred',
- region.backend.api.read_preference)
-
- random_key = uuid.uuid4().hex
- region.set(random_key, "dummyValue10")
-
- # Now as pymongo is loaded so expected read_preference value is enum.
- # There is no proxy so can access MongoCacheBackend directly
- self.assertEqual(3, region.backend.api.read_preference)
-
- def test_missing_replica_set_name(self):
- self.arguments['use_replica'] = True
- region = dp_region.make_region()
- self.assertRaises(exception.ValidationError, region.configure,
- 'keystone.cache.mongo',
- arguments=self.arguments)
-
- def test_provided_replica_set_name(self):
- self.arguments['use_replica'] = True
- self.arguments['replicaset_name'] = 'my_replica'
- dp_region.make_region().configure('keystone.cache.mongo',
- arguments=self.arguments)
- self.assertTrue(True) # reached here means no initialization error
-
- def test_incorrect_mongo_ttl_seconds(self):
- self.arguments['mongo_ttl_seconds'] = 'sixty'
- region = dp_region.make_region()
- self.assertRaises(exception.ValidationError, region.configure,
- 'keystone.cache.mongo',
- arguments=self.arguments)
-
- def test_cache_configuration_values_assertion(self):
- self.arguments['use_replica'] = True
- self.arguments['replicaset_name'] = 'my_replica'
- self.arguments['mongo_ttl_seconds'] = 60
- self.arguments['ssl'] = False
- region = dp_region.make_region().configure('keystone.cache.mongo',
- arguments=self.arguments)
- # There is no proxy so can access MongoCacheBackend directly
- self.assertEqual('localhost:27017', region.backend.api.hosts)
- self.assertEqual('ks_cache', region.backend.api.db_name)
- self.assertEqual('cache', region.backend.api.cache_collection)
- self.assertEqual('test_user', region.backend.api.username)
- self.assertEqual('test_password', region.backend.api.password)
- self.assertEqual(True, region.backend.api.use_replica)
- self.assertEqual('my_replica', region.backend.api.replicaset_name)
- self.assertEqual(False, region.backend.api.conn_kwargs['ssl'])
- self.assertEqual(60, region.backend.api.ttl_seconds)
-
- def test_multiple_region_cache_configuration(self):
- arguments1 = copy.copy(self.arguments)
- arguments1['cache_collection'] = 'cache_region1'
-
- region1 = dp_region.make_region().configure('keystone.cache.mongo',
- arguments=arguments1)
- # There is no proxy so can access MongoCacheBackend directly
- self.assertEqual('localhost:27017', region1.backend.api.hosts)
- self.assertEqual('ks_cache', region1.backend.api.db_name)
- self.assertEqual('cache_region1', region1.backend.api.cache_collection)
- self.assertEqual('test_user', region1.backend.api.username)
- self.assertEqual('test_password', region1.backend.api.password)
- # Should be None because of delayed initialization
- self.assertIsNone(region1.backend.api._data_manipulator)
-
- random_key1 = uuid.uuid4().hex
- region1.set(random_key1, "dummyValue10")
- self.assertEqual("dummyValue10", region1.get(random_key1))
- # Now should have initialized
- self.assertIsInstance(region1.backend.api._data_manipulator,
- mongo.BaseTransform)
-
- class_name = '%s.%s' % (MyTransformer.__module__, "MyTransformer")
-
- arguments2 = copy.copy(self.arguments)
- arguments2['cache_collection'] = 'cache_region2'
- arguments2['son_manipulator'] = class_name
-
- region2 = dp_region.make_region().configure('keystone.cache.mongo',
- arguments=arguments2)
- # There is no proxy so can access MongoCacheBackend directly
- self.assertEqual('localhost:27017', region2.backend.api.hosts)
- self.assertEqual('ks_cache', region2.backend.api.db_name)
- self.assertEqual('cache_region2', region2.backend.api.cache_collection)
-
- # Should be None because of delayed initialization
- self.assertIsNone(region2.backend.api._data_manipulator)
-
- random_key = uuid.uuid4().hex
- region2.set(random_key, "dummyValue20")
- self.assertEqual("dummyValue20", region2.get(random_key))
- # Now should have initialized
- self.assertIsInstance(region2.backend.api._data_manipulator,
- MyTransformer)
-
- region1.set(random_key1, "dummyValue22")
- self.assertEqual("dummyValue22", region1.get(random_key1))
-
- def test_typical_configuration(self):
-
- dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
- self.assertTrue(True) # reached here means no initialization error
-
- def test_backend_get_missing_data(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
-
- random_key = uuid.uuid4().hex
- # should return NO_VALUE as key does not exist in cache
- self.assertEqual(api.NO_VALUE, region.get(random_key))
-
- def test_backend_set_data(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
-
- random_key = uuid.uuid4().hex
- region.set(random_key, "dummyValue")
- self.assertEqual("dummyValue", region.get(random_key))
-
- def test_backend_set_data_with_string_as_valid_ttl(self):
-
- self.arguments['mongo_ttl_seconds'] = '3600'
- region = dp_region.make_region().configure('keystone.cache.mongo',
- arguments=self.arguments)
- self.assertEqual(3600, region.backend.api.ttl_seconds)
- random_key = uuid.uuid4().hex
- region.set(random_key, "dummyValue")
- self.assertEqual("dummyValue", region.get(random_key))
-
- def test_backend_set_data_with_int_as_valid_ttl(self):
-
- self.arguments['mongo_ttl_seconds'] = 1800
- region = dp_region.make_region().configure('keystone.cache.mongo',
- arguments=self.arguments)
- self.assertEqual(1800, region.backend.api.ttl_seconds)
- random_key = uuid.uuid4().hex
- region.set(random_key, "dummyValue")
- self.assertEqual("dummyValue", region.get(random_key))
-
- def test_backend_set_none_as_data(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
-
- random_key = uuid.uuid4().hex
- region.set(random_key, None)
- self.assertIsNone(region.get(random_key))
-
- def test_backend_set_blank_as_data(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
-
- random_key = uuid.uuid4().hex
- region.set(random_key, "")
- self.assertEqual("", region.get(random_key))
-
- def test_backend_set_same_key_multiple_times(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
-
- random_key = uuid.uuid4().hex
- region.set(random_key, "dummyValue")
- self.assertEqual("dummyValue", region.get(random_key))
-
- dict_value = {'key1': 'value1'}
- region.set(random_key, dict_value)
- self.assertEqual(dict_value, region.get(random_key))
-
- region.set(random_key, "dummyValue2")
- self.assertEqual("dummyValue2", region.get(random_key))
-
- def test_backend_multi_set_data(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
- random_key = uuid.uuid4().hex
- random_key1 = uuid.uuid4().hex
- random_key2 = uuid.uuid4().hex
- random_key3 = uuid.uuid4().hex
- mapping = {random_key1: 'dummyValue1',
- random_key2: 'dummyValue2',
- random_key3: 'dummyValue3'}
- region.set_multi(mapping)
- # should return NO_VALUE as key does not exist in cache
- self.assertEqual(api.NO_VALUE, region.get(random_key))
- self.assertFalse(region.get(random_key))
- self.assertEqual("dummyValue1", region.get(random_key1))
- self.assertEqual("dummyValue2", region.get(random_key2))
- self.assertEqual("dummyValue3", region.get(random_key3))
-
- def test_backend_multi_get_data(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
- random_key = uuid.uuid4().hex
- random_key1 = uuid.uuid4().hex
- random_key2 = uuid.uuid4().hex
- random_key3 = uuid.uuid4().hex
- mapping = {random_key1: 'dummyValue1',
- random_key2: '',
- random_key3: 'dummyValue3'}
- region.set_multi(mapping)
-
- keys = [random_key, random_key1, random_key2, random_key3]
- results = region.get_multi(keys)
- # should return NO_VALUE as key does not exist in cache
- self.assertEqual(api.NO_VALUE, results[0])
- self.assertEqual("dummyValue1", results[1])
- self.assertEqual("", results[2])
- self.assertEqual("dummyValue3", results[3])
-
- def test_backend_multi_set_should_update_existing(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
- random_key = uuid.uuid4().hex
- random_key1 = uuid.uuid4().hex
- random_key2 = uuid.uuid4().hex
- random_key3 = uuid.uuid4().hex
- mapping = {random_key1: 'dummyValue1',
- random_key2: 'dummyValue2',
- random_key3: 'dummyValue3'}
- region.set_multi(mapping)
- # should return NO_VALUE as key does not exist in cache
- self.assertEqual(api.NO_VALUE, region.get(random_key))
- self.assertEqual("dummyValue1", region.get(random_key1))
- self.assertEqual("dummyValue2", region.get(random_key2))
- self.assertEqual("dummyValue3", region.get(random_key3))
-
- mapping = {random_key1: 'dummyValue4',
- random_key2: 'dummyValue5'}
- region.set_multi(mapping)
- self.assertEqual(api.NO_VALUE, region.get(random_key))
- self.assertEqual("dummyValue4", region.get(random_key1))
- self.assertEqual("dummyValue5", region.get(random_key2))
- self.assertEqual("dummyValue3", region.get(random_key3))
-
- def test_backend_multi_set_get_with_blanks_none(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
- random_key = uuid.uuid4().hex
- random_key1 = uuid.uuid4().hex
- random_key2 = uuid.uuid4().hex
- random_key3 = uuid.uuid4().hex
- random_key4 = uuid.uuid4().hex
- mapping = {random_key1: 'dummyValue1',
- random_key2: None,
- random_key3: '',
- random_key4: 'dummyValue4'}
- region.set_multi(mapping)
- # should return NO_VALUE as key does not exist in cache
- self.assertEqual(api.NO_VALUE, region.get(random_key))
- self.assertEqual("dummyValue1", region.get(random_key1))
- self.assertIsNone(region.get(random_key2))
- self.assertEqual("", region.get(random_key3))
- self.assertEqual("dummyValue4", region.get(random_key4))
-
- keys = [random_key, random_key1, random_key2, random_key3, random_key4]
- results = region.get_multi(keys)
-
- # should return NO_VALUE as key does not exist in cache
- self.assertEqual(api.NO_VALUE, results[0])
- self.assertEqual("dummyValue1", results[1])
- self.assertIsNone(results[2])
- self.assertEqual("", results[3])
- self.assertEqual("dummyValue4", results[4])
-
- mapping = {random_key1: 'dummyValue5',
- random_key2: 'dummyValue6'}
- region.set_multi(mapping)
- self.assertEqual(api.NO_VALUE, region.get(random_key))
- self.assertEqual("dummyValue5", region.get(random_key1))
- self.assertEqual("dummyValue6", region.get(random_key2))
- self.assertEqual("", region.get(random_key3))
-
- def test_backend_delete_data(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
-
- random_key = uuid.uuid4().hex
- region.set(random_key, "dummyValue")
- self.assertEqual("dummyValue", region.get(random_key))
-
- region.delete(random_key)
- # should return NO_VALUE as key no longer exists in cache
- self.assertEqual(api.NO_VALUE, region.get(random_key))
-
- def test_backend_multi_delete_data(self):
-
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
- random_key = uuid.uuid4().hex
- random_key1 = uuid.uuid4().hex
- random_key2 = uuid.uuid4().hex
- random_key3 = uuid.uuid4().hex
- mapping = {random_key1: 'dummyValue1',
- random_key2: 'dummyValue2',
- random_key3: 'dummyValue3'}
- region.set_multi(mapping)
- # should return NO_VALUE as key does not exist in cache
- self.assertEqual(api.NO_VALUE, region.get(random_key))
- self.assertEqual("dummyValue1", region.get(random_key1))
- self.assertEqual("dummyValue2", region.get(random_key2))
- self.assertEqual("dummyValue3", region.get(random_key3))
- self.assertEqual(api.NO_VALUE, region.get("InvalidKey"))
-
- keys = mapping.keys()
-
- region.delete_multi(keys)
-
- self.assertEqual(api.NO_VALUE, region.get("InvalidKey"))
- # should return NO_VALUE as keys no longer exist in cache
- self.assertEqual(api.NO_VALUE, region.get(random_key1))
- self.assertEqual(api.NO_VALUE, region.get(random_key2))
- self.assertEqual(api.NO_VALUE, region.get(random_key3))
-
- def test_additional_crud_method_arguments_support(self):
- """Additional arguments should works across find/insert/update."""
-
- self.arguments['wtimeout'] = 30000
- self.arguments['j'] = True
- self.arguments['continue_on_error'] = True
- self.arguments['secondary_acceptable_latency_ms'] = 60
- region = dp_region.make_region().configure(
- 'keystone.cache.mongo',
- arguments=self.arguments
- )
-
- # There is no proxy so can access MongoCacheBackend directly
- api_methargs = region.backend.api.meth_kwargs
- self.assertEqual(30000, api_methargs['wtimeout'])
- self.assertEqual(True, api_methargs['j'])
- self.assertEqual(True, api_methargs['continue_on_error'])
- self.assertEqual(60, api_methargs['secondary_acceptable_latency_ms'])
-
- random_key = uuid.uuid4().hex
- region.set(random_key, "dummyValue1")
- self.assertEqual("dummyValue1", region.get(random_key))
-
- region.set(random_key, "dummyValue2")
- self.assertEqual("dummyValue2", region.get(random_key))
-
- random_key = uuid.uuid4().hex
- region.set(random_key, "dummyValue3")
- self.assertEqual("dummyValue3", region.get(random_key))
diff --git a/keystone-moon/keystone/tests/unit/test_catalog.py b/keystone-moon/keystone/tests/unit/test_catalog.py
deleted file mode 100644
index 76e3055a..00000000
--- a/keystone-moon/keystone/tests/unit/test_catalog.py
+++ /dev/null
@@ -1,355 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from six.moves import http_client
-
-from keystone import catalog
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit import rest
-
-
-BASE_URL = 'http://127.0.0.1:35357/v2'
-SERVICE_FIXTURE = object()
-
-
-class V2CatalogTestCase(rest.RestfulTestCase):
- def setUp(self):
- super(V2CatalogTestCase, self).setUp()
- self.useFixture(database.Database())
-
- self.service = unit.new_service_ref()
- self.service_id = self.service['id']
- self.catalog_api.create_service(self.service_id, self.service)
-
- # TODO(termie): add an admin user to the fixtures and use that user
- # override the fixtures, for now
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'],
- self.tenant_bar['id'],
- self.role_admin['id'])
-
- def config_overrides(self):
- super(V2CatalogTestCase, self).config_overrides()
- self.config_fixture.config(group='catalog', driver='sql')
-
- def _get_token_id(self, r):
- """Applicable only to JSON."""
- return r.result['access']['token']['id']
-
- def _endpoint_create(self, expected_status=http_client.OK,
- service_id=SERVICE_FIXTURE,
- publicurl='http://localhost:8080',
- internalurl='http://localhost:8080',
- adminurl='http://localhost:8080'):
- if service_id is SERVICE_FIXTURE:
- service_id = self.service_id
-
- path = '/v2.0/endpoints'
- body = {
- 'endpoint': {
- 'adminurl': adminurl,
- 'service_id': service_id,
- 'region': 'RegionOne',
- 'internalurl': internalurl,
- 'publicurl': publicurl
- }
- }
-
- r = self.admin_request(method='POST', token=self.get_scoped_token(),
- path=path, expected_status=expected_status,
- body=body)
- return body, r
-
- def _region_create(self):
- region = unit.new_region_ref()
- region_id = region['id']
- self.catalog_api.create_region(region)
- return region_id
-
- def test_endpoint_create(self):
- req_body, response = self._endpoint_create()
- self.assertIn('endpoint', response.result)
- self.assertIn('id', response.result['endpoint'])
- for field, value in req_body['endpoint'].items():
- self.assertEqual(value, response.result['endpoint'][field])
-
- def test_pure_v3_endpoint_with_publicurl_visible_from_v2(self):
- """Test pure v3 endpoint can be fetched via v2.0 API.
-
- For those who are using v2.0 APIs, endpoints created by v3 API should
- also be visible as there are no differences about the endpoints
- except the format or the internal implementation. Since publicURL is
- required for v2.0 API, so only v3 endpoints of the service which have
- the public interface endpoint will be converted into v2.0 endpoints.
- """
- region_id = self._region_create()
-
- # create v3 endpoints with three interfaces
- body = {
- 'endpoint': unit.new_endpoint_ref(self.service_id,
- region_id=region_id)
- }
- for interface in catalog.controllers.INTERFACES:
- body['endpoint']['interface'] = interface
- self.admin_request(method='POST',
- token=self.get_scoped_token(),
- path='/v3/endpoints',
- expected_status=http_client.CREATED,
- body=body)
-
- r = self.admin_request(token=self.get_scoped_token(),
- path='/v2.0/endpoints')
- # Endpoints of the service which have a public interface endpoint
- # will be returned via v2.0 API
- self.assertEqual(1, len(r.result['endpoints']))
- v2_endpoint = r.result['endpoints'][0]
- self.assertEqual(self.service_id, v2_endpoint['service_id'])
- # This is not the focus of this test, so no different urls are used.
- self.assertEqual(body['endpoint']['url'], v2_endpoint['publicurl'])
- self.assertEqual(body['endpoint']['url'], v2_endpoint['adminurl'])
- self.assertEqual(body['endpoint']['url'], v2_endpoint['internalurl'])
- self.assertNotIn('name', v2_endpoint)
-
- v3_endpoint = self.catalog_api.get_endpoint(v2_endpoint['id'])
- # Checks the v3 public endpoint's id is the generated v2.0 endpoint
- self.assertEqual('public', v3_endpoint['interface'])
- self.assertEqual(self.service_id, v3_endpoint['service_id'])
-
- def test_pure_v3_endpoint_without_publicurl_invisible_from_v2(self):
- """Test that the v2.0 API can't fetch v3 endpoints without publicURLs.
-
- v2.0 API will return endpoints created by v3 API, but publicURL is
- required for the service in the v2.0 API, therefore v3 endpoints of
- a service which don't have publicURL will be ignored.
- """
- region_id = self._region_create()
-
- # create a v3 endpoint without public interface
- body = {
- 'endpoint': unit.new_endpoint_ref(self.service_id,
- region_id=region_id)
- }
- for interface in catalog.controllers.INTERFACES:
- if interface == 'public':
- continue
- body['endpoint']['interface'] = interface
- self.admin_request(method='POST',
- token=self.get_scoped_token(),
- path='/v3/endpoints',
- expected_status=http_client.CREATED,
- body=body)
-
- r = self.admin_request(token=self.get_scoped_token(),
- path='/v2.0/endpoints')
- # v3 endpoints of a service which don't have publicURL can't be
- # fetched via v2.0 API
- self.assertEqual(0, len(r.result['endpoints']))
-
- def test_endpoint_create_with_null_adminurl(self):
- req_body, response = self._endpoint_create(adminurl=None)
- self.assertIsNone(req_body['endpoint']['adminurl'])
- self.assertNotIn('adminurl', response.result['endpoint'])
-
- def test_endpoint_create_with_empty_adminurl(self):
- req_body, response = self._endpoint_create(adminurl='')
- self.assertEqual('', req_body['endpoint']['adminurl'])
- self.assertNotIn("adminurl", response.result['endpoint'])
-
- def test_endpoint_create_with_null_internalurl(self):
- req_body, response = self._endpoint_create(internalurl=None)
- self.assertIsNone(req_body['endpoint']['internalurl'])
- self.assertNotIn('internalurl', response.result['endpoint'])
-
- def test_endpoint_create_with_empty_internalurl(self):
- req_body, response = self._endpoint_create(internalurl='')
- self.assertEqual('', req_body['endpoint']['internalurl'])
- self.assertNotIn("internalurl", response.result['endpoint'])
-
- def test_endpoint_create_with_null_publicurl(self):
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- publicurl=None)
-
- def test_endpoint_create_with_empty_publicurl(self):
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- publicurl='')
-
- def test_endpoint_create_with_null_service_id(self):
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- service_id=None)
-
- def test_endpoint_create_with_empty_service_id(self):
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- service_id='')
-
- def test_endpoint_create_with_valid_url(self):
- """Create endpoint with valid URL should be tested, too."""
- # list one valid url is enough, no need to list too much
- valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
-
- # baseline tests that all valid URLs works
- self._endpoint_create(expected_status=http_client.OK,
- publicurl=valid_url,
- internalurl=valid_url,
- adminurl=valid_url)
-
- def test_endpoint_create_with_invalid_url(self):
- """Test the invalid cases: substitutions is not exactly right."""
- invalid_urls = [
- # using a substitution that is not whitelisted - KeyError
- 'http://127.0.0.1:8774/v1.1/$(nonexistent)s',
-
- # invalid formatting - ValueError
- 'http://127.0.0.1:8774/v1.1/$(tenant_id)',
- 'http://127.0.0.1:8774/v1.1/$(tenant_id)t',
- 'http://127.0.0.1:8774/v1.1/$(tenant_id',
-
- # invalid type specifier - TypeError
- # admin_url is a string not an int
- 'http://127.0.0.1:8774/v1.1/$(admin_url)d',
- ]
-
- # list one valid url is enough, no need to list too much
- valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
-
- # Case one: publicurl, internalurl and adminurl are
- # all invalid
- for invalid_url in invalid_urls:
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- publicurl=invalid_url,
- internalurl=invalid_url,
- adminurl=invalid_url)
-
- # Case two: publicurl, internalurl are invalid
- # and adminurl is valid
- for invalid_url in invalid_urls:
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- publicurl=invalid_url,
- internalurl=invalid_url,
- adminurl=valid_url)
-
- # Case three: publicurl, adminurl are invalid
- # and internalurl is valid
- for invalid_url in invalid_urls:
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- publicurl=invalid_url,
- internalurl=valid_url,
- adminurl=invalid_url)
-
- # Case four: internalurl, adminurl are invalid
- # and publicurl is valid
- for invalid_url in invalid_urls:
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- publicurl=valid_url,
- internalurl=invalid_url,
- adminurl=invalid_url)
-
- # Case five: publicurl is invalid, internalurl
- # and adminurl are valid
- for invalid_url in invalid_urls:
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- publicurl=invalid_url,
- internalurl=valid_url,
- adminurl=valid_url)
-
- # Case six: internalurl is invalid, publicurl
- # and adminurl are valid
- for invalid_url in invalid_urls:
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- publicurl=valid_url,
- internalurl=invalid_url,
- adminurl=valid_url)
-
- # Case seven: adminurl is invalid, publicurl
- # and internalurl are valid
- for invalid_url in invalid_urls:
- self._endpoint_create(expected_status=http_client.BAD_REQUEST,
- publicurl=valid_url,
- internalurl=valid_url,
- adminurl=invalid_url)
-
-
-class TestV2CatalogAPISQL(unit.TestCase):
-
- def setUp(self):
- super(TestV2CatalogAPISQL, self).setUp()
- self.useFixture(database.Database())
- self.catalog_api = catalog.Manager()
-
- service = unit.new_service_ref()
- self.service_id = service['id']
- self.catalog_api.create_service(self.service_id, service)
-
- self.create_endpoint(service_id=self.service_id)
-
- def create_endpoint(self, service_id, **kwargs):
- endpoint = unit.new_endpoint_ref(service_id=service_id,
- region_id=None,
- **kwargs)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
- return endpoint
-
- def config_overrides(self):
- super(TestV2CatalogAPISQL, self).config_overrides()
- self.config_fixture.config(group='catalog', driver='sql')
-
- def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
- user_id = uuid.uuid4().hex
- tenant_id = uuid.uuid4().hex
-
- # the only endpoint in the catalog is the one created in setUp
- catalog = self.catalog_api.get_catalog(user_id, tenant_id)
- self.assertEqual(1, len(catalog))
- # it's also the only endpoint in the backend
- self.assertEqual(1, len(self.catalog_api.list_endpoints()))
-
- # create a new, invalid endpoint - malformed type declaration
- self.create_endpoint(self.service_id,
- url='http://keystone/%(tenant_id)')
-
- # create a new, invalid endpoint - nonexistent key
- self.create_endpoint(self.service_id,
- url='http://keystone/%(you_wont_find_me)s')
-
- # verify that the invalid endpoints don't appear in the catalog
- catalog = self.catalog_api.get_catalog(user_id, tenant_id)
- self.assertEqual(1, len(catalog))
- # all three endpoints appear in the backend
- self.assertEqual(3, len(self.catalog_api.list_endpoints()))
-
- def test_get_catalog_always_returns_service_name(self):
- user_id = uuid.uuid4().hex
- tenant_id = uuid.uuid4().hex
-
- # new_service_ref() returns a ref with a `name`.
- named_svc = unit.new_service_ref()
- self.catalog_api.create_service(named_svc['id'], named_svc)
- self.create_endpoint(service_id=named_svc['id'])
-
- # This time manually delete the generated `name`.
- unnamed_svc = unit.new_service_ref()
- del unnamed_svc['name']
- self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc)
- self.create_endpoint(service_id=unnamed_svc['id'])
-
- region = None
- catalog = self.catalog_api.get_catalog(user_id, tenant_id)
-
- self.assertEqual(named_svc['name'],
- catalog[region][named_svc['type']]['name'])
-
- # verify a name is not generated when the service is passed to the API
- self.assertEqual('', catalog[region][unnamed_svc['type']]['name'])
diff --git a/keystone-moon/keystone/tests/unit/test_cert_setup.py b/keystone-moon/keystone/tests/unit/test_cert_setup.py
deleted file mode 100644
index debf87f5..00000000
--- a/keystone-moon/keystone/tests/unit/test_cert_setup.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import shutil
-
-import mock
-from six.moves import http_client
-from testtools import matchers
-
-from keystone.common import environment
-from keystone.common import openssl
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import rest
-from keystone import token
-
-
-SSLDIR = unit.dirs.tmp('ssl')
-CONF = unit.CONF
-
-
-CERTDIR = os.path.join(SSLDIR, 'certs')
-KEYDIR = os.path.join(SSLDIR, 'private')
-
-
-class CertSetupTestCase(rest.RestfulTestCase):
-
- def setUp(self):
- super(CertSetupTestCase, self).setUp()
-
- def cleanup_ssldir():
- try:
- shutil.rmtree(SSLDIR)
- except OSError:
- pass
-
- self.addCleanup(cleanup_ssldir)
-
- def config_overrides(self):
- super(CertSetupTestCase, self).config_overrides()
- ca_certs = os.path.join(CERTDIR, 'ca.pem')
- ca_key = os.path.join(CERTDIR, 'cakey.pem')
-
- self.config_fixture.config(
- group='signing',
- certfile=os.path.join(CERTDIR, 'signing_cert.pem'),
- ca_certs=ca_certs,
- ca_key=ca_key,
- keyfile=os.path.join(KEYDIR, 'signing_key.pem'))
- self.config_fixture.config(
- group='ssl',
- ca_key=ca_key)
- self.config_fixture.config(
- group='eventlet_server_ssl',
- ca_certs=ca_certs,
- certfile=os.path.join(CERTDIR, 'keystone.pem'),
- keyfile=os.path.join(KEYDIR, 'keystonekey.pem'))
- self.config_fixture.config(group='token', provider='pkiz')
-
- def test_can_handle_missing_certs(self):
- controller = token.controllers.Auth()
-
- self.config_fixture.config(group='signing', certfile='invalid')
- user = unit.create_user(self.identity_api,
- domain_id=CONF.identity.default_domain_id)
- body_dict = {
- 'passwordCredentials': {
- 'userId': user['id'],
- 'password': user['password'],
- },
- }
- self.assertRaises(exception.UnexpectedError,
- controller.authenticate,
- {}, body_dict)
-
- def test_create_pki_certs(self, rebuild=False):
- pki = openssl.ConfigurePKI(None, None, rebuild=rebuild)
- pki.run()
- self.assertTrue(os.path.exists(CONF.signing.certfile))
- self.assertTrue(os.path.exists(CONF.signing.ca_certs))
- self.assertTrue(os.path.exists(CONF.signing.keyfile))
-
- def test_create_ssl_certs(self, rebuild=False):
- ssl = openssl.ConfigureSSL(None, None, rebuild=rebuild)
- ssl.run()
- self.assertTrue(os.path.exists(CONF.eventlet_server_ssl.ca_certs))
- self.assertTrue(os.path.exists(CONF.eventlet_server_ssl.certfile))
- self.assertTrue(os.path.exists(CONF.eventlet_server_ssl.keyfile))
-
- def test_fetch_signing_cert(self, rebuild=False):
- pki = openssl.ConfigurePKI(None, None, rebuild=rebuild)
- pki.run()
-
- # NOTE(jamielennox): Use request directly because certificate
- # requests don't have some of the normal information
- signing_resp = self.request(self.public_app,
- '/v2.0/certificates/signing',
- method='GET',
- expected_status=http_client.OK)
-
- cacert_resp = self.request(self.public_app,
- '/v2.0/certificates/ca',
- method='GET',
- expected_status=http_client.OK)
-
- with open(CONF.signing.certfile) as f:
- self.assertEqual(f.read(), signing_resp.text)
-
- with open(CONF.signing.ca_certs) as f:
- self.assertEqual(f.read(), cacert_resp.text)
-
- # NOTE(jamielennox): This is weird behaviour that we need to enforce.
- # It doesn't matter what you ask for it's always going to give text
- # with a text/html content_type.
-
- for path in ['/v2.0/certificates/signing', '/v2.0/certificates/ca']:
- for accept in [None, 'text/html', 'application/json', 'text/xml']:
- headers = {'Accept': accept} if accept else {}
- resp = self.request(self.public_app, path, method='GET',
- expected_status=http_client.OK,
- headers=headers)
-
- self.assertEqual('text/html', resp.content_type)
-
- def test_fetch_signing_cert_when_rebuild(self):
- pki = openssl.ConfigurePKI(None, None)
- pki.run()
- self.test_fetch_signing_cert(rebuild=True)
-
- def test_failure(self):
- for path in ['/v2.0/certificates/signing', '/v2.0/certificates/ca']:
- self.request(self.public_app, path, method='GET',
- expected_status=http_client.INTERNAL_SERVER_ERROR)
-
- def test_pki_certs_rebuild(self):
- self.test_create_pki_certs()
- with open(CONF.signing.certfile) as f:
- cert_file1 = f.read()
-
- self.test_create_pki_certs(rebuild=True)
- with open(CONF.signing.certfile) as f:
- cert_file2 = f.read()
-
- self.assertNotEqual(cert_file1, cert_file2)
-
- def test_ssl_certs_rebuild(self):
- self.test_create_ssl_certs()
- with open(CONF.eventlet_server_ssl.certfile) as f:
- cert_file1 = f.read()
-
- self.test_create_ssl_certs(rebuild=True)
- with open(CONF.eventlet_server_ssl.certfile) as f:
- cert_file2 = f.read()
-
- self.assertNotEqual(cert_file1, cert_file2)
-
- @mock.patch.object(os, 'remove')
- def test_rebuild_pki_certs_remove_error(self, mock_remove):
- self.test_create_pki_certs()
- with open(CONF.signing.certfile) as f:
- cert_file1 = f.read()
-
- mock_remove.side_effect = OSError()
- self.test_create_pki_certs(rebuild=True)
- with open(CONF.signing.certfile) as f:
- cert_file2 = f.read()
-
- self.assertEqual(cert_file1, cert_file2)
-
- @mock.patch.object(os, 'remove')
- def test_rebuild_ssl_certs_remove_error(self, mock_remove):
- self.test_create_ssl_certs()
- with open(CONF.eventlet_server_ssl.certfile) as f:
- cert_file1 = f.read()
-
- mock_remove.side_effect = OSError()
- self.test_create_ssl_certs(rebuild=True)
- with open(CONF.eventlet_server_ssl.certfile) as f:
- cert_file2 = f.read()
-
- self.assertEqual(cert_file1, cert_file2)
-
- def test_create_pki_certs_twice_without_rebuild(self):
- self.test_create_pki_certs()
- with open(CONF.signing.certfile) as f:
- cert_file1 = f.read()
-
- self.test_create_pki_certs()
- with open(CONF.signing.certfile) as f:
- cert_file2 = f.read()
-
- self.assertEqual(cert_file1, cert_file2)
-
- def test_create_ssl_certs_twice_without_rebuild(self):
- self.test_create_ssl_certs()
- with open(CONF.eventlet_server_ssl.certfile) as f:
- cert_file1 = f.read()
-
- self.test_create_ssl_certs()
- with open(CONF.eventlet_server_ssl.certfile) as f:
- cert_file2 = f.read()
-
- self.assertEqual(cert_file1, cert_file2)
-
-
-class TestExecCommand(unit.TestCase):
-
- @mock.patch.object(environment.subprocess.Popen, 'poll')
- def test_running_a_successful_command(self, mock_poll):
- mock_poll.return_value = 0
-
- ssl = openssl.ConfigureSSL('keystone_user', 'keystone_group')
- ssl.exec_command(['ls'])
-
- @mock.patch.object(environment.subprocess, 'check_output')
- def test_running_an_invalid_command(self, mock_check_output):
- cmd = ['ls']
-
- output = 'this is the output string'
-
- error = environment.subprocess.CalledProcessError(returncode=1,
- cmd=cmd,
- output=output)
- mock_check_output.side_effect = error
-
- ssl = openssl.ConfigureSSL('keystone_user', 'keystone_group')
- e = self.assertRaises(environment.subprocess.CalledProcessError,
- ssl.exec_command,
- cmd)
- self.assertThat(e.output, matchers.Equals(output))
diff --git a/keystone-moon/keystone/tests/unit/test_cli.py b/keystone-moon/keystone/tests/unit/test_cli.py
deleted file mode 100644
index 06f2e172..00000000
--- a/keystone-moon/keystone/tests/unit/test_cli.py
+++ /dev/null
@@ -1,478 +0,0 @@
-# Copyright 2014 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import os
-import uuid
-
-import fixtures
-import mock
-from oslo_config import cfg
-from six.moves import range
-from testtools import matchers
-
-from keystone.cmd import cli
-from keystone.common import dependency
-from keystone.i18n import _
-from keystone import resource
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-
-
-CONF = cfg.CONF
-
-
-class CliTestCase(unit.SQLDriverOverrides, unit.TestCase):
- def config_files(self):
- config_files = super(CliTestCase, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
- def test_token_flush(self):
- self.useFixture(database.Database())
- self.load_backends()
- cli.TokenFlush.main()
-
-
-class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase):
-
- def setUp(self):
- self.useFixture(database.Database())
- super(CliBootStrapTestCase, self).setUp()
-
- def config_files(self):
- self.config_fixture.register_cli_opt(cli.command_opt)
- config_files = super(CliBootStrapTestCase, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
- def config(self, config_files):
- CONF(args=['bootstrap', '--bootstrap-password', uuid.uuid4().hex],
- project='keystone',
- default_config_files=config_files)
-
- def test_bootstrap(self):
- bootstrap = cli.BootStrap()
- self._do_test_bootstrap(bootstrap)
-
- def _do_test_bootstrap(self, bootstrap):
- bootstrap.do_bootstrap()
- project = bootstrap.resource_manager.get_project_by_name(
- bootstrap.project_name,
- 'default')
- user = bootstrap.identity_manager.get_user_by_name(
- bootstrap.username,
- 'default')
- role = bootstrap.role_manager.get_role(bootstrap.role_id)
- role_list = (
- bootstrap.assignment_manager.get_roles_for_user_and_project(
- user['id'],
- project['id']))
- self.assertIs(len(role_list), 1)
- self.assertEqual(role_list[0], role['id'])
- # NOTE(morganfainberg): Pass an empty context, it isn't used by
- # `authenticate` method.
- bootstrap.identity_manager.authenticate(
- {},
- user['id'],
- bootstrap.password)
-
- if bootstrap.region_id:
- region = bootstrap.catalog_manager.get_region(bootstrap.region_id)
- self.assertEqual(self.region_id, region['id'])
-
- if bootstrap.service_id:
- svc = bootstrap.catalog_manager.get_service(bootstrap.service_id)
- self.assertEqual(self.service_name, svc['name'])
-
- self.assertEqual(set(['admin', 'public', 'internal']),
- set(bootstrap.endpoints))
-
- urls = {'public': self.public_url,
- 'internal': self.internal_url,
- 'admin': self.admin_url}
-
- for interface, url in urls.items():
- endpoint_id = bootstrap.endpoints[interface]
- endpoint = bootstrap.catalog_manager.get_endpoint(endpoint_id)
-
- self.assertEqual(self.region_id, endpoint['region_id'])
- self.assertEqual(url, endpoint['url'])
- self.assertEqual(svc['id'], endpoint['service_id'])
- self.assertEqual(interface, endpoint['interface'])
-
- def test_bootstrap_is_idempotent(self):
- # NOTE(morganfainberg): Ensure we can run bootstrap multiple times
- # without erroring.
- bootstrap = cli.BootStrap()
- self._do_test_bootstrap(bootstrap)
- self._do_test_bootstrap(bootstrap)
-
-
-class CliBootStrapTestCaseWithEnvironment(CliBootStrapTestCase):
-
- def config(self, config_files):
- CONF(args=['bootstrap'], project='keystone',
- default_config_files=config_files)
-
- def setUp(self):
- super(CliBootStrapTestCaseWithEnvironment, self).setUp()
- self.password = uuid.uuid4().hex
- self.username = uuid.uuid4().hex
- self.project_name = uuid.uuid4().hex
- self.role_name = uuid.uuid4().hex
- self.service_name = uuid.uuid4().hex
- self.public_url = uuid.uuid4().hex
- self.internal_url = uuid.uuid4().hex
- self.admin_url = uuid.uuid4().hex
- self.region_id = uuid.uuid4().hex
- self.default_domain = {
- 'id': CONF.identity.default_domain_id,
- 'name': 'Default',
- }
- self.useFixture(
- fixtures.EnvironmentVariable('OS_BOOTSTRAP_PASSWORD',
- newvalue=self.password))
- self.useFixture(
- fixtures.EnvironmentVariable('OS_BOOTSTRAP_USERNAME',
- newvalue=self.username))
- self.useFixture(
- fixtures.EnvironmentVariable('OS_BOOTSTRAP_PROJECT_NAME',
- newvalue=self.project_name))
- self.useFixture(
- fixtures.EnvironmentVariable('OS_BOOTSTRAP_ROLE_NAME',
- newvalue=self.role_name))
- self.useFixture(
- fixtures.EnvironmentVariable('OS_BOOTSTRAP_SERVICE_NAME',
- newvalue=self.service_name))
- self.useFixture(
- fixtures.EnvironmentVariable('OS_BOOTSTRAP_PUBLIC_URL',
- newvalue=self.public_url))
- self.useFixture(
- fixtures.EnvironmentVariable('OS_BOOTSTRAP_INTERNAL_URL',
- newvalue=self.internal_url))
- self.useFixture(
- fixtures.EnvironmentVariable('OS_BOOTSTRAP_ADMIN_URL',
- newvalue=self.admin_url))
- self.useFixture(
- fixtures.EnvironmentVariable('OS_BOOTSTRAP_REGION_ID',
- newvalue=self.region_id))
-
- def test_assignment_created_with_user_exists(self):
- # test assignment can be created if user already exists.
- bootstrap = cli.BootStrap()
- bootstrap.resource_manager.create_domain(self.default_domain['id'],
- self.default_domain)
- user_ref = unit.new_user_ref(self.default_domain['id'],
- name=self.username,
- password=self.password)
- bootstrap.identity_manager.create_user(user_ref)
- self._do_test_bootstrap(bootstrap)
-
- def test_assignment_created_with_project_exists(self):
- # test assignment can be created if project already exists.
- bootstrap = cli.BootStrap()
- bootstrap.resource_manager.create_domain(self.default_domain['id'],
- self.default_domain)
- project_ref = unit.new_project_ref(self.default_domain['id'],
- name=self.project_name)
- bootstrap.resource_manager.create_project(project_ref['id'],
- project_ref)
- self._do_test_bootstrap(bootstrap)
-
- def test_assignment_created_with_role_exists(self):
- # test assignment can be created if role already exists.
- bootstrap = cli.BootStrap()
- bootstrap.resource_manager.create_domain(self.default_domain['id'],
- self.default_domain)
- role = unit.new_role_ref(name=self.role_name)
- bootstrap.role_manager.create_role(role['id'], role)
- self._do_test_bootstrap(bootstrap)
-
- def test_assignment_created_with_region_exists(self):
- # test assignment can be created if role already exists.
- bootstrap = cli.BootStrap()
- bootstrap.resource_manager.create_domain(self.default_domain['id'],
- self.default_domain)
- region = unit.new_region_ref(id=self.region_id)
- bootstrap.catalog_manager.create_region(region)
- self._do_test_bootstrap(bootstrap)
-
- def test_endpoints_created_with_service_exists(self):
- # test assignment can be created if role already exists.
- bootstrap = cli.BootStrap()
- bootstrap.resource_manager.create_domain(self.default_domain['id'],
- self.default_domain)
- service = unit.new_service_ref(name=self.service_name)
- bootstrap.catalog_manager.create_service(service['id'], service)
- self._do_test_bootstrap(bootstrap)
-
- def test_endpoints_created_with_endpoint_exists(self):
- # test assignment can be created if role already exists.
- bootstrap = cli.BootStrap()
- bootstrap.resource_manager.create_domain(self.default_domain['id'],
- self.default_domain)
- service = unit.new_service_ref(name=self.service_name)
- bootstrap.catalog_manager.create_service(service['id'], service)
-
- region = unit.new_region_ref(id=self.region_id)
- bootstrap.catalog_manager.create_region(region)
-
- endpoint = unit.new_endpoint_ref(interface='public',
- service_id=service['id'],
- url=self.public_url,
- region_id=self.region_id)
- bootstrap.catalog_manager.create_endpoint(endpoint['id'], endpoint)
-
- self._do_test_bootstrap(bootstrap)
-
-
-class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase):
-
- def setUp(self):
- self.useFixture(database.Database())
- super(CliDomainConfigAllTestCase, self).setUp()
- self.load_backends()
- self.config_fixture.config(
- group='identity',
- domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap')
- self.domain_count = 3
- self.setup_initial_domains()
-
- def config_files(self):
- self.config_fixture.register_cli_opt(cli.command_opt)
- self.addCleanup(self.cleanup)
- config_files = super(CliDomainConfigAllTestCase, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
- def cleanup(self):
- CONF.reset()
- CONF.unregister_opt(cli.command_opt)
-
- def cleanup_domains(self):
- for domain in self.domains:
- if domain == 'domain_default':
- # Not allowed to delete the default domain, but should at least
- # delete any domain-specific config for it.
- self.domain_config_api.delete_config(
- CONF.identity.default_domain_id)
- continue
- this_domain = self.domains[domain]
- this_domain['enabled'] = False
- self.resource_api.update_domain(this_domain['id'], this_domain)
- self.resource_api.delete_domain(this_domain['id'])
- self.domains = {}
-
- def config(self, config_files):
- CONF(args=['domain_config_upload', '--all'], project='keystone',
- default_config_files=config_files)
-
- def setup_initial_domains(self):
-
- def create_domain(domain):
- return self.resource_api.create_domain(domain['id'], domain)
-
- self.domains = {}
- self.addCleanup(self.cleanup_domains)
- for x in range(1, self.domain_count):
- domain = 'domain%s' % x
- self.domains[domain] = create_domain(
- {'id': uuid.uuid4().hex, 'name': domain})
- self.domains['domain_default'] = create_domain(
- resource.calc_default_domain())
-
- def test_config_upload(self):
- # The values below are the same as in the domain_configs_multi_ldap
- # directory of test config_files.
- default_config = {
- 'ldap': {'url': 'fake://memory',
- 'user': 'cn=Admin',
- 'password': 'password',
- 'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'ldap'}
- }
- domain1_config = {
- 'ldap': {'url': 'fake://memory1',
- 'user': 'cn=Admin',
- 'password': 'password',
- 'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'ldap',
- 'list_limit': '101'}
- }
- domain2_config = {
- 'ldap': {'url': 'fake://memory',
- 'user': 'cn=Admin',
- 'password': 'password',
- 'suffix': 'cn=myroot,cn=com',
- 'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org',
- 'user_tree_dn': 'ou=Users,dc=myroot,dc=org'},
- 'identity': {'driver': 'ldap'}
- }
-
- # Clear backend dependencies, since cli loads these manually
- dependency.reset()
- cli.DomainConfigUpload.main()
-
- res = self.domain_config_api.get_config_with_sensitive_info(
- CONF.identity.default_domain_id)
- self.assertEqual(default_config, res)
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domains['domain1']['id'])
- self.assertEqual(domain1_config, res)
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domains['domain2']['id'])
- self.assertEqual(domain2_config, res)
-
-
-class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase):
-
- def config(self, config_files):
- CONF(args=['domain_config_upload', '--domain-name', 'Default'],
- project='keystone', default_config_files=config_files)
-
- def test_config_upload(self):
- # The values below are the same as in the domain_configs_multi_ldap
- # directory of test config_files.
- default_config = {
- 'ldap': {'url': 'fake://memory',
- 'user': 'cn=Admin',
- 'password': 'password',
- 'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'ldap'}
- }
-
- # Clear backend dependencies, since cli loads these manually
- dependency.reset()
- cli.DomainConfigUpload.main()
-
- res = self.domain_config_api.get_config_with_sensitive_info(
- CONF.identity.default_domain_id)
- self.assertEqual(default_config, res)
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domains['domain1']['id'])
- self.assertEqual({}, res)
- res = self.domain_config_api.get_config_with_sensitive_info(
- self.domains['domain2']['id'])
- self.assertEqual({}, res)
-
- def test_no_overwrite_config(self):
- # Create a config for the default domain
- default_config = {
- 'ldap': {'url': uuid.uuid4().hex},
- 'identity': {'driver': 'ldap'}
- }
- self.domain_config_api.create_config(
- CONF.identity.default_domain_id, default_config)
-
- # Now try and upload the settings in the configuration file for the
- # default domain
- dependency.reset()
- with mock.patch('six.moves.builtins.print') as mock_print:
- self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
- file_name = ('keystone.%s.conf' %
- resource.calc_default_domain()['name'])
- error_msg = _(
- 'Domain: %(domain)s already has a configuration defined - '
- 'ignoring file: %(file)s.') % {
- 'domain': resource.calc_default_domain()['name'],
- 'file': os.path.join(CONF.identity.domain_config_dir,
- file_name)}
- mock_print.assert_has_calls([mock.call(error_msg)])
-
- res = self.domain_config_api.get_config(
- CONF.identity.default_domain_id)
- # The initial config should not have been overwritten
- self.assertEqual(default_config, res)
-
-
-class CliDomainConfigNoOptionsTestCase(CliDomainConfigAllTestCase):
-
- def config(self, config_files):
- CONF(args=['domain_config_upload'],
- project='keystone', default_config_files=config_files)
-
- def test_config_upload(self):
- dependency.reset()
- with mock.patch('six.moves.builtins.print') as mock_print:
- self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
- mock_print.assert_has_calls(
- [mock.call(
- _('At least one option must be provided, use either '
- '--all or --domain-name'))])
-
-
-class CliDomainConfigTooManyOptionsTestCase(CliDomainConfigAllTestCase):
-
- def config(self, config_files):
- CONF(args=['domain_config_upload', '--all', '--domain-name',
- 'Default'],
- project='keystone', default_config_files=config_files)
-
- def test_config_upload(self):
- dependency.reset()
- with mock.patch('six.moves.builtins.print') as mock_print:
- self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
- mock_print.assert_has_calls(
- [mock.call(_('The --all option cannot be used with '
- 'the --domain-name option'))])
-
-
-class CliDomainConfigInvalidDomainTestCase(CliDomainConfigAllTestCase):
-
- def config(self, config_files):
- self.invalid_domain_name = uuid.uuid4().hex
- CONF(args=['domain_config_upload', '--domain-name',
- self.invalid_domain_name],
- project='keystone', default_config_files=config_files)
-
- def test_config_upload(self):
- dependency.reset()
- with mock.patch('six.moves.builtins.print') as mock_print:
- self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
- file_name = 'keystone.%s.conf' % self.invalid_domain_name
- error_msg = (_(
- 'Invalid domain name: %(domain)s found in config file name: '
- '%(file)s - ignoring this file.') % {
- 'domain': self.invalid_domain_name,
- 'file': os.path.join(CONF.identity.domain_config_dir,
- file_name)})
- mock_print.assert_has_calls([mock.call(error_msg)])
-
-
-class TestDomainConfigFinder(unit.BaseTestCase):
-
- def setUp(self):
- super(TestDomainConfigFinder, self).setUp()
- self.logging = self.useFixture(fixtures.LoggerFixture())
-
- @mock.patch('os.walk')
- def test_finder_ignores_files(self, mock_walk):
- mock_walk.return_value = [
- ['.', [], ['file.txt', 'keystone.conf', 'keystone.domain0.conf']],
- ]
-
- domain_configs = list(cli._domain_config_finder('.'))
-
- expected_domain_configs = [('./keystone.domain0.conf', 'domain0')]
- self.assertThat(domain_configs,
- matchers.Equals(expected_domain_configs))
-
- expected_msg_template = ('Ignoring file (%s) while scanning '
- 'domain config directory')
- self.assertThat(
- self.logging.output,
- matchers.Contains(expected_msg_template % 'file.txt'))
- self.assertThat(
- self.logging.output,
- matchers.Contains(expected_msg_template % 'keystone.conf'))
diff --git a/keystone-moon/keystone/tests/unit/test_config.py b/keystone-moon/keystone/tests/unit/test_config.py
deleted file mode 100644
index d7e7809f..00000000
--- a/keystone-moon/keystone/tests/unit/test_config.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from oslo_config import cfg
-
-from keystone.common import config
-from keystone import exception
-from keystone.tests import unit
-
-
-CONF = cfg.CONF
-
-
-class ConfigTestCase(unit.TestCase):
-
- def config_files(self):
- config_files = super(ConfigTestCase, self).config_files()
- # Insert the keystone sample as the first config file to be loaded
- # since it is used in one of the code paths to determine the paste-ini
- # location.
- config_files.insert(0, unit.dirs.etc('keystone.conf.sample'))
- return config_files
-
- def test_paste_config(self):
- self.assertEqual(unit.dirs.etc('keystone-paste.ini'),
- config.find_paste_config())
- self.config_fixture.config(group='paste_deploy',
- config_file=uuid.uuid4().hex)
- self.assertRaises(exception.ConfigFileNotFound,
- config.find_paste_config)
- self.config_fixture.config(group='paste_deploy', config_file='')
- self.assertEqual(unit.dirs.etc('keystone.conf.sample'),
- config.find_paste_config())
-
- def test_config_default(self):
- self.assertIs(None, CONF.auth.password)
- self.assertIs(None, CONF.auth.token)
-
-
-class DeprecatedTestCase(unit.TestCase):
- """Test using the original (deprecated) name for renamed options."""
-
- def config_files(self):
- config_files = super(DeprecatedTestCase, self).config_files()
- config_files.append(unit.dirs.tests_conf('deprecated.conf'))
- return config_files
-
- def test_sql(self):
- # Options in [sql] were moved to [database] in Icehouse for the change
- # to use oslo-incubator's db.sqlalchemy.sessions.
-
- self.assertEqual('sqlite://deprecated', CONF.database.connection)
- self.assertEqual(54321, CONF.database.idle_timeout)
-
-
-class DeprecatedOverrideTestCase(unit.TestCase):
- """Test using the deprecated AND new name for renamed options."""
-
- def config_files(self):
- config_files = super(DeprecatedOverrideTestCase, self).config_files()
- config_files.append(unit.dirs.tests_conf('deprecated_override.conf'))
- return config_files
-
- def test_sql(self):
- # Options in [sql] were moved to [database] in Icehouse for the change
- # to use oslo-incubator's db.sqlalchemy.sessions.
-
- self.assertEqual('sqlite://new', CONF.database.connection)
- self.assertEqual(65432, CONF.database.idle_timeout)
diff --git a/keystone-moon/keystone/tests/unit/test_contrib_ec2.py b/keystone-moon/keystone/tests/unit/test_contrib_ec2.py
deleted file mode 100644
index 2810a47a..00000000
--- a/keystone-moon/keystone/tests/unit/test_contrib_ec2.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# Copyright 2015 Intel Corporation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from keystoneclient.contrib.ec2 import utils as ec2_utils
-
-from keystone.contrib.ec2 import controllers
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.ksfixtures import database
-
-
-class TestCredentialEc2(unit.TestCase):
- # TODO(davechen): more testcases for ec2 credential are expected here and
- # the file name would be renamed to "test_credential" to correspond with
- # "test_v3_credential.py".
- def setUp(self):
- super(TestCredentialEc2, self).setUp()
- self.useFixture(database.Database())
- self.load_backends()
- self.load_fixtures(default_fixtures)
- self.user_id = self.user_foo['id']
- self.project_id = self.tenant_bar['id']
- self.blob = {'access': uuid.uuid4().hex,
- 'secret': uuid.uuid4().hex}
- self.controller = controllers.Ec2Controller()
- self.creds_ref = {'user_id': self.user_id,
- 'tenant_id': self.project_id,
- 'access': self.blob['access'],
- 'secret': self.blob['secret'],
- 'trust_id': None}
-
- def test_signature_validate_no_host_port(self):
- """Test signature validation with the access/secret provided."""
- access = self.blob['access']
- secret = self.blob['secret']
- signer = ec2_utils.Ec2Signer(secret)
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
- request = {'host': 'foo',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- signature = signer.generate(request)
-
- sig_ref = {'access': access,
- 'signature': signature,
- 'host': 'foo',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- # Now validate the signature based on the dummy request
- self.assertTrue(self.controller.check_signature(self.creds_ref,
- sig_ref))
-
- def test_signature_validate_with_host_port(self):
- """Test signature validation when host is bound with port.
-
- Host is bound with a port, generally, the port here is not the
- standard port for the protocol, like '80' for HTTP and port 443
- for HTTPS, the port is not omitted by the client library.
- """
- access = self.blob['access']
- secret = self.blob['secret']
- signer = ec2_utils.Ec2Signer(secret)
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
- request = {'host': 'foo:8181',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- signature = signer.generate(request)
-
- sig_ref = {'access': access,
- 'signature': signature,
- 'host': 'foo:8181',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- # Now validate the signature based on the dummy request
- self.assertTrue(self.controller.check_signature(self.creds_ref,
- sig_ref))
-
- def test_signature_validate_with_missed_host_port(self):
- """Test signature validation when host is bound with well-known port.
-
- Host is bound with a port, but the port is well-know port like '80'
- for HTTP and port 443 for HTTPS, sometimes, client library omit
- the port but then make the request with the port.
- see (How to create the string to sign): 'http://docs.aws.amazon.com/
- general/latest/gr/signature-version-2.html'.
-
- Since "credentials['host']" is not set by client library but is
- taken from "req.host", so caused the differences.
- """
- access = self.blob['access']
- secret = self.blob['secret']
- signer = ec2_utils.Ec2Signer(secret)
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
- # Omit the port to generate the signature.
- cnt_req = {'host': 'foo',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- signature = signer.generate(cnt_req)
-
- sig_ref = {'access': access,
- 'signature': signature,
- 'host': 'foo:8080',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- # Now validate the signature based on the dummy request
- # Check the signature again after omitting the port.
- self.assertTrue(self.controller.check_signature(self.creds_ref,
- sig_ref))
-
- def test_signature_validate_no_signature(self):
- """Signature is not presented in signature reference data."""
- access = self.blob['access']
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
-
- sig_ref = {'access': access,
- 'signature': None,
- 'host': 'foo:8080',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- creds_ref = {'user_id': self.user_id,
- 'tenant_id': self.project_id,
- 'access': self.blob['access'],
- 'secret': self.blob['secret'],
- 'trust_id': None
- }
-
- # Now validate the signature based on the dummy request
- self.assertRaises(exception.Unauthorized,
- self.controller.check_signature,
- creds_ref, sig_ref)
-
- def test_signature_validate_invalid_signature(self):
- """Signature is not signed on the correct data."""
- access = self.blob['access']
- secret = self.blob['secret']
- signer = ec2_utils.Ec2Signer(secret)
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
- request = {'host': 'bar',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- signature = signer.generate(request)
-
- sig_ref = {'access': access,
- 'signature': signature,
- 'host': 'foo:8080',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- creds_ref = {'user_id': self.user_id,
- 'tenant_id': self.project_id,
- 'access': self.blob['access'],
- 'secret': self.blob['secret'],
- 'trust_id': None
- }
-
- # Now validate the signature based on the dummy request
- self.assertRaises(exception.Unauthorized,
- self.controller.check_signature,
- creds_ref, sig_ref)
-
- def test_check_non_admin_user(self):
- """Checking if user is admin causes uncaught error.
-
- When checking if a user is an admin, keystone.exception.Unauthorized
- is raised but not caught if the user is not an admin.
- """
- # make a non-admin user
- context = {'is_admin': False, 'token_id': uuid.uuid4().hex}
-
- # check if user is admin
- # no exceptions should be raised
- self.controller._is_admin(context)
diff --git a/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py b/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py
deleted file mode 100644
index c9706da7..00000000
--- a/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from keystone.contrib import s3
-from keystone import exception
-from keystone.tests import unit
-
-
-class S3ContribCore(unit.TestCase):
- def setUp(self):
- super(S3ContribCore, self).setUp()
-
- self.load_backends()
-
- self.controller = s3.S3Controller()
-
- def test_good_signature_v1(self):
- creds_ref = {'secret':
- u'b121dd41cdcc42fe9f70e572e84295aa'}
- credentials = {'token':
- 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB'
- 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM'
- 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ'
- 'vbV9zMy50eHQ=',
- 'signature': 'IL4QLcLVaYgylF9iHj6Wb8BGZsw='}
-
- self.assertIsNone(self.controller.check_signature(creds_ref,
- credentials))
-
- def test_bad_signature_v1(self):
- creds_ref = {'secret':
- u'b121dd41cdcc42fe9f70e572e84295aa'}
- credentials = {'token':
- 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB'
- 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM'
- 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ'
- 'vbV9zMy50eHQ=',
- 'signature': uuid.uuid4().hex}
-
- self.assertRaises(exception.Unauthorized,
- self.controller.check_signature,
- creds_ref, credentials)
-
- def test_good_signature_v4(self):
- creds_ref = {'secret':
- u'e7a7a2240136494986991a6598d9fb9f'}
- credentials = {'token':
- 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw'
- 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy'
- 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1'
- 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==',
- 'signature':
- '730ba8f58df6ffeadd78f402e990b2910d60'
- 'bc5c2aec63619734f096a4dd77be'}
-
- self.assertIsNone(self.controller.check_signature(creds_ref,
- credentials))
-
- def test_bad_signature_v4(self):
- creds_ref = {'secret':
- u'e7a7a2240136494986991a6598d9fb9f'}
- credentials = {'token':
- 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw'
- 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy'
- 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1'
- 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==',
- 'signature': uuid.uuid4().hex}
-
- self.assertRaises(exception.Unauthorized,
- self.controller.check_signature,
- creds_ref, credentials)
-
- def test_bad_token_v4(self):
- creds_ref = {'secret':
- u'e7a7a2240136494986991a6598d9fb9f'}
- # token has invalid format of first part
- credentials = {'token':
- 'QVdTNC1BQUEKWApYClg=',
- 'signature': ''}
- self.assertRaises(exception.Unauthorized,
- self.controller.check_signature,
- creds_ref, credentials)
-
- # token has invalid format of scope
- credentials = {'token':
- 'QVdTNC1ITUFDLVNIQTI1NgpYCi8vczMvYXdzTl9yZXF1ZXN0Clg=',
- 'signature': ''}
- self.assertRaises(exception.Unauthorized,
- self.controller.check_signature,
- creds_ref, credentials)
diff --git a/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py b/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py
deleted file mode 100644
index 111aa5c6..00000000
--- a/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from six.moves import http_client
-
-from keystone.tests.unit import test_v3
-
-
-class BaseTestCase(test_v3.RestfulTestCase):
-
- CA_PATH = '/v3/OS-SIMPLE-CERT/ca'
- CERT_PATH = '/v3/OS-SIMPLE-CERT/certificates'
-
-
-class TestSimpleCert(BaseTestCase):
-
- def request_cert(self, path):
- content_type = 'application/x-pem-file'
- response = self.request(app=self.public_app,
- method='GET',
- path=path,
- headers={'Accept': content_type},
- expected_status=http_client.OK)
-
- self.assertEqual(content_type, response.content_type.lower())
- self.assertIn(b'---BEGIN', response.body)
-
- return response
-
- def test_ca_cert(self):
- self.request_cert(self.CA_PATH)
-
- def test_signing_cert(self):
- self.request_cert(self.CERT_PATH)
-
- def test_missing_file(self):
- # these files do not exist
- self.config_fixture.config(group='signing',
- ca_certs=uuid.uuid4().hex,
- certfile=uuid.uuid4().hex)
-
- for path in [self.CA_PATH, self.CERT_PATH]:
- self.request(app=self.public_app,
- method='GET',
- path=path,
- expected_status=http_client.INTERNAL_SERVER_ERROR)
diff --git a/keystone-moon/keystone/tests/unit/test_credential.py b/keystone-moon/keystone/tests/unit/test_credential.py
deleted file mode 100644
index e917ef71..00000000
--- a/keystone-moon/keystone/tests/unit/test_credential.py
+++ /dev/null
@@ -1,265 +0,0 @@
-# Copyright 2015 UnitedStack, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from keystoneclient.contrib.ec2 import utils as ec2_utils
-from six.moves import http_client
-
-from keystone.common import utils
-from keystone.contrib.ec2 import controllers
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit import rest
-
-CRED_TYPE_EC2 = controllers.CRED_TYPE_EC2
-
-
-class V2CredentialEc2TestCase(rest.RestfulTestCase):
- def setUp(self):
- super(V2CredentialEc2TestCase, self).setUp()
- self.user_id = self.user_foo['id']
- self.project_id = self.tenant_bar['id']
-
- def _get_token_id(self, r):
- return r.result['access']['token']['id']
-
- def _get_ec2_cred(self):
- uri = self._get_ec2_cred_uri()
- r = self.public_request(method='POST', token=self.get_scoped_token(),
- path=uri, body={'tenant_id': self.project_id})
- return r.result['credential']
-
- def _get_ec2_cred_uri(self):
- return '/v2.0/users/%s/credentials/OS-EC2' % self.user_id
-
- def test_ec2_cannot_get_non_ec2_credential(self):
- access_key = uuid.uuid4().hex
- cred_id = utils.hash_access_key(access_key)
- non_ec2_cred = unit.new_credential_ref(
- user_id=self.user_id,
- project_id=self.project_id)
- non_ec2_cred['id'] = cred_id
- self.credential_api.create_credential(cred_id, non_ec2_cred)
-
- # if access_key is not found, ec2 controller raises Unauthorized
- # exception
- path = '/'.join([self._get_ec2_cred_uri(), access_key])
- self.public_request(method='GET', token=self.get_scoped_token(),
- path=path,
- expected_status=http_client.UNAUTHORIZED)
-
- def assertValidErrorResponse(self, r):
- # FIXME(wwwjfy): it's copied from test_v3.py. The logic of this method
- # in test_v2.py and test_v3.py (both are inherited from rest.py) has no
- # difference, so they should be refactored into one place. Also, the
- # function signatures in both files don't match the one in the parent
- # class in rest.py.
- resp = r.result
- self.assertIsNotNone(resp.get('error'))
- self.assertIsNotNone(resp['error'].get('code'))
- self.assertIsNotNone(resp['error'].get('title'))
- self.assertIsNotNone(resp['error'].get('message'))
- self.assertEqual(int(resp['error']['code']), r.status_code)
-
- def test_ec2_list_credentials(self):
- self._get_ec2_cred()
- uri = self._get_ec2_cred_uri()
- r = self.public_request(method='GET', token=self.get_scoped_token(),
- path=uri)
- cred_list = r.result['credentials']
- self.assertEqual(1, len(cred_list))
-
- # non-EC2 credentials won't be fetched
- non_ec2_cred = unit.new_credential_ref(
- user_id=self.user_id,
- project_id=self.project_id)
- non_ec2_cred['type'] = uuid.uuid4().hex
- self.credential_api.create_credential(non_ec2_cred['id'],
- non_ec2_cred)
- r = self.public_request(method='GET', token=self.get_scoped_token(),
- path=uri)
- cred_list_2 = r.result['credentials']
- # still one element because non-EC2 credentials are not returned.
- self.assertEqual(1, len(cred_list_2))
- self.assertEqual(cred_list[0], cred_list_2[0])
-
-
-class V2CredentialEc2Controller(unit.TestCase):
- def setUp(self):
- super(V2CredentialEc2Controller, self).setUp()
- self.useFixture(database.Database())
- self.load_backends()
- self.load_fixtures(default_fixtures)
- self.user_id = self.user_foo['id']
- self.project_id = self.tenant_bar['id']
- self.controller = controllers.Ec2Controller()
- self.blob, tmp_ref = unit.new_ec2_credential(
- user_id=self.user_id,
- project_id=self.project_id)
-
- self.creds_ref = (controllers.Ec2Controller
- ._convert_v3_to_ec2_credential(tmp_ref))
-
- def test_signature_validate_no_host_port(self):
- """Test signature validation with the access/secret provided."""
- access = self.blob['access']
- secret = self.blob['secret']
- signer = ec2_utils.Ec2Signer(secret)
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
- request = {'host': 'foo',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- signature = signer.generate(request)
-
- sig_ref = {'access': access,
- 'signature': signature,
- 'host': 'foo',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- # Now validate the signature based on the dummy request
- self.assertTrue(self.controller.check_signature(self.creds_ref,
- sig_ref))
-
- def test_signature_validate_with_host_port(self):
- """Test signature validation when host is bound with port.
-
- Host is bound with a port, generally, the port here is not the
- standard port for the protocol, like '80' for HTTP and port 443
- for HTTPS, the port is not omitted by the client library.
- """
- access = self.blob['access']
- secret = self.blob['secret']
- signer = ec2_utils.Ec2Signer(secret)
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
- request = {'host': 'foo:8181',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- signature = signer.generate(request)
-
- sig_ref = {'access': access,
- 'signature': signature,
- 'host': 'foo:8181',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- # Now validate the signature based on the dummy request
- self.assertTrue(self.controller.check_signature(self.creds_ref,
- sig_ref))
-
- def test_signature_validate_with_missed_host_port(self):
- """Test signature validation when host is bound with well-known port.
-
- Host is bound with a port, but the port is well-know port like '80'
- for HTTP and port 443 for HTTPS, sometimes, client library omit
- the port but then make the request with the port.
- see (How to create the string to sign): 'http://docs.aws.amazon.com/
- general/latest/gr/signature-version-2.html'.
-
- Since "credentials['host']" is not set by client library but is
- taken from "req.host", so caused the differences.
- """
- access = self.blob['access']
- secret = self.blob['secret']
- signer = ec2_utils.Ec2Signer(secret)
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
- # Omit the port to generate the signature.
- cnt_req = {'host': 'foo',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- signature = signer.generate(cnt_req)
-
- sig_ref = {'access': access,
- 'signature': signature,
- 'host': 'foo:8080',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- # Now validate the signature based on the dummy request
- # Check the signature again after omitting the port.
- self.assertTrue(self.controller.check_signature(self.creds_ref,
- sig_ref))
-
- def test_signature_validate_no_signature(self):
- """Signature is not presented in signature reference data."""
- access = self.blob['access']
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
-
- sig_ref = {'access': access,
- 'signature': None,
- 'host': 'foo:8080',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- # Now validate the signature based on the dummy request
- self.assertRaises(exception.Unauthorized,
- self.controller.check_signature,
- self.creds_ref, sig_ref)
-
- def test_signature_validate_invalid_signature(self):
- """Signature is not signed on the correct data."""
- access = self.blob['access']
- secret = self.blob['secret']
- signer = ec2_utils.Ec2Signer(secret)
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
- request = {'host': 'bar',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- signature = signer.generate(request)
-
- sig_ref = {'access': access,
- 'signature': signature,
- 'host': 'foo:8080',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
-
- # Now validate the signature based on the dummy request
- self.assertRaises(exception.Unauthorized,
- self.controller.check_signature,
- self.creds_ref, sig_ref)
-
- def test_check_non_admin_user(self):
- """Checking if user is admin causes uncaught error.
-
- When checking if a user is an admin, keystone.exception.Unauthorized
- is raised but not caught if the user is not an admin.
- """
- # make a non-admin user
- context = {'is_admin': False, 'token_id': uuid.uuid4().hex}
-
- # check if user is admin
- # no exceptions should be raised
- self.controller._is_admin(context)
diff --git a/keystone-moon/keystone/tests/unit/test_driver_hints.py b/keystone-moon/keystone/tests/unit/test_driver_hints.py
deleted file mode 100644
index 75d76194..00000000
--- a/keystone-moon/keystone/tests/unit/test_driver_hints.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.common import driver_hints
-from keystone.tests.unit import core as test
-
-
-class ListHintsTests(test.TestCase):
-
- def test_create_iterate_satisfy(self):
- hints = driver_hints.Hints()
- hints.add_filter('t1', 'data1')
- hints.add_filter('t2', 'data2')
- self.assertEqual(2, len(hints.filters))
- filter = hints.get_exact_filter_by_name('t1')
- self.assertEqual('t1', filter['name'])
- self.assertEqual('data1', filter['value'])
- self.assertEqual('equals', filter['comparator'])
- self.assertFalse(filter['case_sensitive'])
-
- hints.filters.remove(filter)
- filter_count = 0
- for filter in hints.filters:
- filter_count += 1
- self.assertEqual('t2', filter['name'])
- self.assertEqual(1, filter_count)
-
- def test_multiple_creates(self):
- hints = driver_hints.Hints()
- hints.add_filter('t1', 'data1')
- hints.add_filter('t2', 'data2')
- self.assertEqual(2, len(hints.filters))
- hints2 = driver_hints.Hints()
- hints2.add_filter('t4', 'data1')
- hints2.add_filter('t5', 'data2')
- self.assertEqual(2, len(hints.filters))
-
- def test_limits(self):
- hints = driver_hints.Hints()
- self.assertIsNone(hints.limit)
- hints.set_limit(10)
- self.assertEqual(10, hints.limit['limit'])
- self.assertFalse(hints.limit['truncated'])
- hints.set_limit(11)
- self.assertEqual(11, hints.limit['limit'])
- self.assertFalse(hints.limit['truncated'])
- hints.set_limit(10, truncated=True)
- self.assertEqual(10, hints.limit['limit'])
- self.assertTrue(hints.limit['truncated'])
diff --git a/keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py b/keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py
deleted file mode 100644
index 03c95e27..00000000
--- a/keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystonemiddleware import ec2_token as ksm_ec2_token
-
-from keystone.middleware import ec2_token
-from keystone.tests import unit as tests
-
-
-class EC2TokenMiddlewareTestBase(tests.BaseTestCase):
- def test_symbols(self):
- """Verify ec2 middleware symbols.
-
- Verify that the keystone version of ec2_token middleware forwards the
- public symbols from the keystonemiddleware version of the ec2_token
- middleware for backwards compatibility.
-
- """
-
- self.assertIs(ksm_ec2_token.app_factory, ec2_token.app_factory)
- self.assertIs(ksm_ec2_token.filter_factory, ec2_token.filter_factory)
- self.assertTrue(
- issubclass(ec2_token.EC2Token, ksm_ec2_token.EC2Token),
- 'ec2_token.EC2Token is not subclass of '
- 'keystonemiddleware.ec2_token.EC2Token')
diff --git a/keystone-moon/keystone/tests/unit/test_entry_points.py b/keystone-moon/keystone/tests/unit/test_entry_points.py
deleted file mode 100644
index e973e942..00000000
--- a/keystone-moon/keystone/tests/unit/test_entry_points.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import stevedore
-from testtools import matchers
-
-from keystone.tests.unit import core as test
-
-
-class TestPasteDeploymentEntryPoints(test.TestCase):
- def test_entry_point_middleware(self):
- """Assert that our list of expected middleware is present."""
- expected_names = [
- 'admin_token_auth',
- 'build_auth_context',
- 'crud_extension',
- 'cors',
- 'debug',
- 'endpoint_filter_extension',
- 'ec2_extension',
- 'ec2_extension_v3',
- 'federation_extension',
- 'json_body',
- 'oauth1_extension',
- 'request_id',
- 'revoke_extension',
- 's3_extension',
- 'simple_cert_extension',
- 'sizelimit',
- 'token_auth',
- 'url_normalize',
- 'user_crud_extension',
- ]
-
- em = stevedore.ExtensionManager('paste.filter_factory')
-
- actual_names = [extension.name for extension in em]
-
- self.assertThat(actual_names, matchers.ContainsAll(expected_names))
diff --git a/keystone-moon/keystone/tests/unit/test_exception.py b/keystone-moon/keystone/tests/unit/test_exception.py
deleted file mode 100644
index 25ca2c09..00000000
--- a/keystone-moon/keystone/tests/unit/test_exception.py
+++ /dev/null
@@ -1,273 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from oslo_config import cfg
-from oslo_config import fixture as config_fixture
-from oslo_serialization import jsonutils
-import six
-
-from keystone.common import wsgi
-from keystone import exception
-from keystone.tests import unit
-
-
-class ExceptionTestCase(unit.BaseTestCase):
- def assertValidJsonRendering(self, e):
- resp = wsgi.render_exception(e)
- self.assertEqual(e.code, resp.status_int)
- self.assertEqual('%s %s' % (e.code, e.title), resp.status)
-
- j = jsonutils.loads(resp.body)
- self.assertIsNotNone(j.get('error'))
- self.assertIsNotNone(j['error'].get('code'))
- self.assertIsNotNone(j['error'].get('title'))
- self.assertIsNotNone(j['error'].get('message'))
- self.assertNotIn('\n', j['error']['message'])
- self.assertNotIn(' ', j['error']['message'])
- self.assertTrue(type(j['error']['code']) is int)
-
- def test_all_json_renderings(self):
- """Everything callable in the exception module should be renderable.
-
- ... except for the base error class (exception.Error), which is not
- user-facing.
-
- This test provides a custom message to bypass docstring parsing, which
- should be tested separately.
-
- """
- for cls in [x for x in exception.__dict__.values() if callable(x)]:
- if cls is not exception.Error and isinstance(cls, exception.Error):
- self.assertValidJsonRendering(cls(message='Overridden.'))
-
- def test_validation_error(self):
- target = uuid.uuid4().hex
- attribute = uuid.uuid4().hex
- e = exception.ValidationError(target=target, attribute=attribute)
- self.assertValidJsonRendering(e)
- self.assertIn(target, six.text_type(e))
- self.assertIn(attribute, six.text_type(e))
-
- def test_not_found(self):
- target = uuid.uuid4().hex
- e = exception.NotFound(target=target)
- self.assertValidJsonRendering(e)
- self.assertIn(target, six.text_type(e))
-
- def test_forbidden_title(self):
- e = exception.Forbidden()
- resp = wsgi.render_exception(e)
- j = jsonutils.loads(resp.body)
- self.assertEqual('Forbidden', e.title)
- self.assertEqual('Forbidden', j['error'].get('title'))
-
- def test_unicode_message(self):
- message = u'Comment \xe7a va'
- e = exception.Error(message)
-
- try:
- self.assertEqual(message, six.text_type(e))
- except UnicodeEncodeError:
- self.fail("unicode error message not supported")
-
- def test_unicode_string(self):
- e = exception.ValidationError(attribute='xx',
- target='Long \xe2\x80\x93 Dash')
-
- if six.PY2:
- self.assertIn(u'\u2013', six.text_type(e))
- else:
- self.assertIn('Long \xe2\x80\x93 Dash', six.text_type(e))
-
- def test_invalid_unicode_string(self):
- # NOTE(jamielennox): This is a complete failure case so what is
- # returned in the exception message is not that important so long
- # as there is an error with a message
- e = exception.ValidationError(attribute='xx',
- target='\xe7a va')
-
- if six.PY2:
- self.assertIn('%(attribute)', six.text_type(e))
- else:
- # There's no UnicodeDecodeError on python 3.
- self.assertIn('\xe7a va', six.text_type(e))
-
-
-class UnexpectedExceptionTestCase(ExceptionTestCase):
- """Tests if internal info is exposed to the API user on UnexpectedError."""
-
- class SubClassExc(exception.UnexpectedError):
- debug_message_format = 'Debug Message: %(debug_info)s'
-
- def setUp(self):
- super(UnexpectedExceptionTestCase, self).setUp()
- self.exc_str = uuid.uuid4().hex
- self.config_fixture = self.useFixture(config_fixture.Config(cfg.CONF))
-
- def test_unexpected_error_no_debug(self):
- self.config_fixture.config(debug=False)
- e = exception.UnexpectedError(exception=self.exc_str)
- self.assertNotIn(self.exc_str, six.text_type(e))
-
- def test_unexpected_error_debug(self):
- self.config_fixture.config(debug=True, insecure_debug=True)
- e = exception.UnexpectedError(exception=self.exc_str)
- self.assertIn(self.exc_str, six.text_type(e))
-
- def test_unexpected_error_subclass_no_debug(self):
- self.config_fixture.config(debug=False)
- e = UnexpectedExceptionTestCase.SubClassExc(
- debug_info=self.exc_str)
- self.assertEqual(exception.UnexpectedError.message_format,
- six.text_type(e))
-
- def test_unexpected_error_subclass_debug(self):
- self.config_fixture.config(debug=True, insecure_debug=True)
- subclass = self.SubClassExc
-
- e = subclass(debug_info=self.exc_str)
- expected = subclass.debug_message_format % {'debug_info': self.exc_str}
- self.assertEqual(
- '%s %s' % (expected, exception.SecurityError.amendment),
- six.text_type(e))
-
- def test_unexpected_error_custom_message_no_debug(self):
- self.config_fixture.config(debug=False)
- e = exception.UnexpectedError(self.exc_str)
- self.assertEqual(exception.UnexpectedError.message_format,
- six.text_type(e))
-
- def test_unexpected_error_custom_message_debug(self):
- self.config_fixture.config(debug=True, insecure_debug=True)
- e = exception.UnexpectedError(self.exc_str)
- self.assertEqual(
- '%s %s' % (self.exc_str, exception.SecurityError.amendment),
- six.text_type(e))
-
- def test_unexpected_error_custom_message_exception_debug(self):
- self.config_fixture.config(debug=True, insecure_debug=True)
- orig_e = exception.NotFound(target=uuid.uuid4().hex)
- e = exception.UnexpectedError(orig_e)
- self.assertEqual(
- '%s %s' % (six.text_type(orig_e),
- exception.SecurityError.amendment),
- six.text_type(e))
-
- def test_unexpected_error_custom_message_binary_debug(self):
- self.config_fixture.config(debug=True, insecure_debug=True)
- binary_msg = b'something'
- e = exception.UnexpectedError(binary_msg)
- self.assertEqual(
- '%s %s' % (six.text_type(binary_msg),
- exception.SecurityError.amendment),
- six.text_type(e))
-
-
-class SecurityErrorTestCase(ExceptionTestCase):
- """Tests whether security-related info is exposed to the API user."""
-
- def setUp(self):
- super(SecurityErrorTestCase, self).setUp()
- self.config_fixture = self.useFixture(config_fixture.Config(cfg.CONF))
-
- def test_unauthorized_exposure(self):
- self.config_fixture.config(debug=False)
-
- risky_info = uuid.uuid4().hex
- e = exception.Unauthorized(message=risky_info)
- self.assertValidJsonRendering(e)
- self.assertNotIn(risky_info, six.text_type(e))
-
- def test_unauthorized_exposure_in_debug(self):
- self.config_fixture.config(debug=True, insecure_debug=True)
-
- risky_info = uuid.uuid4().hex
- e = exception.Unauthorized(message=risky_info)
- self.assertValidJsonRendering(e)
- self.assertIn(risky_info, six.text_type(e))
-
- def test_forbidden_exposure(self):
- self.config_fixture.config(debug=False)
-
- risky_info = uuid.uuid4().hex
- e = exception.Forbidden(message=risky_info)
- self.assertValidJsonRendering(e)
- self.assertNotIn(risky_info, six.text_type(e))
-
- def test_forbidden_exposure_in_debug(self):
- self.config_fixture.config(debug=True, insecure_debug=True)
-
- risky_info = uuid.uuid4().hex
- e = exception.Forbidden(message=risky_info)
- self.assertValidJsonRendering(e)
- self.assertIn(risky_info, six.text_type(e))
-
- def test_forbidden_action_exposure(self):
- self.config_fixture.config(debug=False)
-
- risky_info = uuid.uuid4().hex
- action = uuid.uuid4().hex
- e = exception.ForbiddenAction(message=risky_info, action=action)
- self.assertValidJsonRendering(e)
- self.assertNotIn(risky_info, six.text_type(e))
- self.assertIn(action, six.text_type(e))
- self.assertNotIn(exception.SecurityError.amendment, six.text_type(e))
-
- e = exception.ForbiddenAction(action=action)
- self.assertValidJsonRendering(e)
- self.assertIn(action, six.text_type(e))
- self.assertNotIn(exception.SecurityError.amendment, six.text_type(e))
-
- def test_forbidden_action_exposure_in_debug(self):
- self.config_fixture.config(debug=True, insecure_debug=True)
-
- risky_info = uuid.uuid4().hex
- action = uuid.uuid4().hex
-
- e = exception.ForbiddenAction(message=risky_info, action=action)
- self.assertValidJsonRendering(e)
- self.assertIn(risky_info, six.text_type(e))
- self.assertIn(exception.SecurityError.amendment, six.text_type(e))
-
- e = exception.ForbiddenAction(action=action)
- self.assertValidJsonRendering(e)
- self.assertIn(action, six.text_type(e))
- self.assertNotIn(exception.SecurityError.amendment, six.text_type(e))
-
- def test_forbidden_action_no_message(self):
- # When no custom message is given when the ForbiddenAction (or other
- # SecurityError subclass) is created the exposed message is the same
- # whether debug is enabled or not.
-
- action = uuid.uuid4().hex
-
- self.config_fixture.config(debug=False)
- e = exception.ForbiddenAction(action=action)
- exposed_message = six.text_type(e)
- self.assertIn(action, exposed_message)
- self.assertNotIn(exception.SecurityError.amendment, six.text_type(e))
-
- self.config_fixture.config(debug=True)
- e = exception.ForbiddenAction(action=action)
- self.assertEqual(exposed_message, six.text_type(e))
-
- def test_unicode_argument_message(self):
- self.config_fixture.config(debug=False)
-
- risky_info = u'\u7ee7\u7eed\u884c\u7f29\u8fdb\u6216'
- e = exception.Forbidden(message=risky_info)
- self.assertValidJsonRendering(e)
- self.assertNotIn(risky_info, six.text_type(e))
diff --git a/keystone-moon/keystone/tests/unit/test_hacking_checks.py b/keystone-moon/keystone/tests/unit/test_hacking_checks.py
deleted file mode 100644
index e279cc7f..00000000
--- a/keystone-moon/keystone/tests/unit/test_hacking_checks.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import textwrap
-
-import mock
-import pep8
-
-from keystone.tests.hacking import checks
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import hacking as hacking_fixtures
-
-
-class BaseStyleCheck(unit.BaseTestCase):
-
- def setUp(self):
- super(BaseStyleCheck, self).setUp()
- self.code_ex = self.useFixture(self.get_fixture())
- self.addCleanup(delattr, self, 'code_ex')
-
- def get_checker(self):
- """Returns the checker to be used for tests in this class."""
- raise NotImplemented('subclasses must provide a real implementation')
-
- def get_fixture(self):
- return hacking_fixtures.HackingCode()
-
- # We are patching pep8 so that only the check under test is actually
- # installed.
- @mock.patch('pep8._checks',
- {'physical_line': {}, 'logical_line': {}, 'tree': {}})
- def run_check(self, code):
- pep8.register_check(self.get_checker())
-
- lines = textwrap.dedent(code).strip().splitlines(True)
-
- checker = pep8.Checker(lines=lines)
- checker.check_all()
- checker.report._deferred_print.sort()
- return checker.report._deferred_print
-
- def assert_has_errors(self, code, expected_errors=None):
- actual_errors = [e[:3] for e in self.run_check(code)]
- self.assertEqual(expected_errors or [], actual_errors)
-
-
-class TestCheckForMutableDefaultArgs(BaseStyleCheck):
-
- def get_checker(self):
- return checks.CheckForMutableDefaultArgs
-
- def test(self):
- code = self.code_ex.mutable_default_args['code']
- errors = self.code_ex.mutable_default_args['expected_errors']
- self.assert_has_errors(code, expected_errors=errors)
-
-
-class TestBlockCommentsBeginWithASpace(BaseStyleCheck):
-
- def get_checker(self):
- return checks.block_comments_begin_with_a_space
-
- def test(self):
- code = self.code_ex.comments_begin_with_space['code']
- errors = self.code_ex.comments_begin_with_space['expected_errors']
- self.assert_has_errors(code, expected_errors=errors)
-
-
-class TestAssertingNoneEquality(BaseStyleCheck):
-
- def get_checker(self):
- return checks.CheckForAssertingNoneEquality
-
- def test(self):
- code = self.code_ex.asserting_none_equality['code']
- errors = self.code_ex.asserting_none_equality['expected_errors']
- self.assert_has_errors(code, expected_errors=errors)
-
-
-class BaseLoggingCheck(BaseStyleCheck):
-
- def get_checker(self):
- return checks.CheckForLoggingIssues
-
- def get_fixture(self):
- return hacking_fixtures.HackingLogging()
-
- def assert_has_errors(self, code, expected_errors=None):
-
- # pull out the parts of the error that we'll match against
- actual_errors = (e[:3] for e in self.run_check(code))
- # adjust line numbers to make the fixture data more readable.
- import_lines = len(self.code_ex.shared_imports.split('\n')) - 1
- actual_errors = [(e[0] - import_lines, e[1], e[2])
- for e in actual_errors]
- self.assertEqual(expected_errors or [], actual_errors)
-
-
-class TestCheckForDebugLoggingIssues(BaseLoggingCheck):
-
- def test_for_translations(self):
- fixture = self.code_ex.assert_no_translations_for_debug_logging
- code = self.code_ex.shared_imports + fixture['code']
- errors = fixture['expected_errors']
- self.assert_has_errors(code, expected_errors=errors)
-
-
-class TestLoggingWithWarn(BaseLoggingCheck):
-
- def test(self):
- data = self.code_ex.assert_not_using_deprecated_warn
- code = self.code_ex.shared_imports + data['code']
- errors = data['expected_errors']
- self.assert_has_errors(code, expected_errors=errors)
-
-
-class TestCheckForNonDebugLoggingIssues(BaseLoggingCheck):
-
- def test_for_translations(self):
- for example in self.code_ex.examples:
- code = self.code_ex.shared_imports + example['code']
- errors = example['expected_errors']
- self.assert_has_errors(code, expected_errors=errors)
-
-
-class TestDictConstructorWithSequenceCopy(BaseStyleCheck):
-
- def get_checker(self):
- return checks.dict_constructor_with_sequence_copy
-
- def test(self):
- code = self.code_ex.dict_constructor['code']
- errors = self.code_ex.dict_constructor['expected_errors']
- self.assert_has_errors(code, expected_errors=errors)
diff --git a/keystone-moon/keystone/tests/unit/test_ipv6.py b/keystone-moon/keystone/tests/unit/test_ipv6.py
deleted file mode 100644
index df59429e..00000000
--- a/keystone-moon/keystone/tests/unit/test_ipv6.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from oslo_config import cfg
-
-from keystone.common import environment
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import appserver
-
-
-CONF = cfg.CONF
-
-
-class IPv6TestCase(unit.TestCase):
-
- def setUp(self):
- self.skip_if_no_ipv6()
- super(IPv6TestCase, self).setUp()
- self.load_backends()
-
- def test_ipv6_ok(self):
- """Make sure both public and admin API work with ipv6."""
- paste_conf = self._paste_config('keystone')
-
- # Verify Admin
- with appserver.AppServer(paste_conf, appserver.ADMIN, host="::1"):
- conn = environment.httplib.HTTPConnection(
- '::1', CONF.eventlet_server.admin_port)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
-
- # Verify Public
- with appserver.AppServer(paste_conf, appserver.MAIN, host="::1"):
- conn = environment.httplib.HTTPConnection(
- '::1', CONF.eventlet_server.public_port)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
diff --git a/keystone-moon/keystone/tests/unit/test_kvs.py b/keystone-moon/keystone/tests/unit/test_kvs.py
deleted file mode 100644
index a88ee1ac..00000000
--- a/keystone-moon/keystone/tests/unit/test_kvs.py
+++ /dev/null
@@ -1,586 +0,0 @@
-# Copyright 2013 Metacloud, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import time
-import uuid
-
-from dogpile.cache import api
-from dogpile.cache import proxy
-import mock
-import six
-from testtools import matchers
-
-from keystone.common.kvs.backends import inmemdb
-from keystone.common.kvs.backends import memcached
-from keystone.common.kvs import core
-from keystone import exception
-from keystone.tests import unit
-
-
-NO_VALUE = api.NO_VALUE
-
-
-class MutexFixture(object):
- def __init__(self, storage_dict, key, timeout):
- self.database = storage_dict
- self.key = '_lock' + key
-
- def acquire(self, wait=True):
- while True:
- try:
- self.database[self.key] = 1
- return True
- except KeyError:
- return False
-
- def release(self):
- self.database.pop(self.key, None)
-
-
-class KVSBackendFixture(inmemdb.MemoryBackend):
- def __init__(self, arguments):
- class InmemTestDB(dict):
- def __setitem__(self, key, value):
- if key in self:
- raise KeyError('Key %s already exists' % key)
- super(InmemTestDB, self).__setitem__(key, value)
-
- self._db = InmemTestDB()
- self.lock_timeout = arguments.pop('lock_timeout', 5)
- self.test_arg = arguments.pop('test_arg', None)
-
- def get_mutex(self, key):
- return MutexFixture(self._db, key, self.lock_timeout)
-
- @classmethod
- def key_mangler(cls, key):
- return 'KVSBackend_' + key
-
-
-class KVSBackendForcedKeyMangleFixture(KVSBackendFixture):
- use_backend_key_mangler = True
-
- @classmethod
- def key_mangler(cls, key):
- return 'KVSBackendForcedKeyMangle_' + key
-
-
-class RegionProxyFixture(proxy.ProxyBackend):
- """A test dogpile.cache proxy that does nothing."""
-
-
-class RegionProxy2Fixture(proxy.ProxyBackend):
- """A test dogpile.cache proxy that does nothing."""
-
-
-class TestMemcacheDriver(api.CacheBackend):
- """A test dogpile.cache backend.
-
- This test backend conforms to the mixin-mechanism for
- overriding set and set_multi methods on dogpile memcached drivers.
- """
-
- class test_client(object):
- # FIXME(morganfainberg): Convert this test client over to using mock
- # and/or mock.MagicMock as appropriate
-
- def __init__(self):
- self.__name__ = 'TestingMemcacheDriverClientObject'
- self.set_arguments_passed = None
- self.keys_values = {}
- self.lock_set_time = None
- self.lock_expiry = None
-
- def set(self, key, value, **set_arguments):
- self.keys_values.clear()
- self.keys_values[key] = value
- self.set_arguments_passed = set_arguments
-
- def set_multi(self, mapping, **set_arguments):
- self.keys_values.clear()
- self.keys_values = mapping
- self.set_arguments_passed = set_arguments
-
- def add(self, key, value, expiry_time):
- # NOTE(morganfainberg): `add` is used in this case for the
- # memcache lock testing. If further testing is required around the
- # actual memcache `add` interface, this method should be
- # expanded to work more like the actual memcache `add` function
- if self.lock_expiry is not None and self.lock_set_time is not None:
- if time.time() - self.lock_set_time < self.lock_expiry:
- return False
- self.lock_expiry = expiry_time
- self.lock_set_time = time.time()
- return True
-
- def delete(self, key):
- # NOTE(morganfainberg): `delete` is used in this case for the
- # memcache lock testing. If further testing is required around the
- # actual memcache `delete` interface, this method should be
- # expanded to work more like the actual memcache `delete` function.
- self.lock_expiry = None
- self.lock_set_time = None
- return True
-
- def __init__(self, arguments):
- self.client = self.test_client()
- self.set_arguments = {}
- # NOTE(morganfainberg): This is the same logic as the dogpile backend
- # since we need to mirror that functionality for the `set_argument`
- # values to appear on the actual backend.
- if 'memcached_expire_time' in arguments:
- self.set_arguments['time'] = arguments['memcached_expire_time']
-
- def set(self, key, value):
- self.client.set(key, value, **self.set_arguments)
-
- def set_multi(self, mapping):
- self.client.set_multi(mapping, **self.set_arguments)
-
-
-class KVSTest(unit.TestCase):
- def setUp(self):
- super(KVSTest, self).setUp()
- self.key_foo = 'foo_' + uuid.uuid4().hex
- self.value_foo = uuid.uuid4().hex
- self.key_bar = 'bar_' + uuid.uuid4().hex
- self.value_bar = {'complex_data_structure': uuid.uuid4().hex}
- self.addCleanup(memcached.VALID_DOGPILE_BACKENDS.pop,
- 'TestDriver',
- None)
- memcached.VALID_DOGPILE_BACKENDS['TestDriver'] = TestMemcacheDriver
-
- def _get_kvs_region(self, name=None):
- if name is None:
- name = uuid.uuid4().hex
- return core.get_key_value_store(name)
-
- def test_kvs_basic_configuration(self):
- # Test that the most basic configuration options pass through to the
- # backend.
- region_one = uuid.uuid4().hex
- region_two = uuid.uuid4().hex
- test_arg = 100
- kvs = self._get_kvs_region(region_one)
- kvs.configure('openstack.kvs.Memory')
-
- self.assertIsInstance(kvs._region.backend, inmemdb.MemoryBackend)
- self.assertEqual(region_one, kvs._region.name)
-
- kvs = self._get_kvs_region(region_two)
- kvs.configure('openstack.kvs.KVSBackendFixture',
- test_arg=test_arg)
-
- self.assertEqual(region_two, kvs._region.name)
- self.assertEqual(test_arg, kvs._region.backend.test_arg)
-
- def test_kvs_proxy_configuration(self):
- # Test that proxies are applied correctly and in the correct (reverse)
- # order to the kvs region.
- kvs = self._get_kvs_region()
- kvs.configure(
- 'openstack.kvs.Memory',
- proxy_list=['keystone.tests.unit.test_kvs.RegionProxyFixture',
- 'keystone.tests.unit.test_kvs.RegionProxy2Fixture'])
-
- self.assertIsInstance(kvs._region.backend, RegionProxyFixture)
- self.assertIsInstance(kvs._region.backend.proxied, RegionProxy2Fixture)
- self.assertIsInstance(kvs._region.backend.proxied.proxied,
- inmemdb.MemoryBackend)
-
- def test_kvs_key_mangler_fallthrough_default(self):
- # Test to make sure we default to the standard dogpile sha1 hashing
- # key_mangler
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.Memory')
-
- self.assertIs(kvs._region.key_mangler, core.sha1_mangle_key)
- # The backend should also have the keymangler set the same as the
- # region now.
- self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key)
-
- def test_kvs_key_mangler_configuration_backend(self):
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.KVSBackendFixture')
- expected = KVSBackendFixture.key_mangler(self.key_foo)
- self.assertEqual(expected, kvs._region.key_mangler(self.key_foo))
-
- def test_kvs_key_mangler_configuration_forced_backend(self):
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.KVSBackendForcedKeyMangleFixture',
- key_mangler=core.sha1_mangle_key)
- expected = KVSBackendForcedKeyMangleFixture.key_mangler(self.key_foo)
- self.assertEqual(expected, kvs._region.key_mangler(self.key_foo))
-
- def test_kvs_key_mangler_configuration_disabled(self):
- # Test that no key_mangler is set if enable_key_mangler is false
- self.config_fixture.config(group='kvs', enable_key_mangler=False)
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.Memory')
-
- self.assertIsNone(kvs._region.key_mangler)
- self.assertIsNone(kvs._region.backend.key_mangler)
-
- def test_kvs_key_mangler_set_on_backend(self):
- def test_key_mangler(key):
- return key
-
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.Memory')
- self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key)
- kvs._set_key_mangler(test_key_mangler)
- self.assertIs(kvs._region.backend.key_mangler, test_key_mangler)
-
- def test_kvs_basic_get_set_delete(self):
- # Test the basic get/set/delete actions on the KVS region
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.Memory')
-
- # Not found should be raised if the key doesn't exist
- self.assertRaises(exception.NotFound, kvs.get, key=self.key_bar)
- kvs.set(self.key_bar, self.value_bar)
- returned_value = kvs.get(self.key_bar)
- # The returned value should be the same value as the value in .set
- self.assertEqual(self.value_bar, returned_value)
- # The value should not be the exact object used in .set
- self.assertIsNot(returned_value, self.value_bar)
- kvs.delete(self.key_bar)
- # Second delete should raise NotFound
- self.assertRaises(exception.NotFound, kvs.delete, key=self.key_bar)
-
- def _kvs_multi_get_set_delete(self, kvs):
- keys = [self.key_foo, self.key_bar]
- expected = [self.value_foo, self.value_bar]
-
- kvs.set_multi({self.key_foo: self.value_foo,
- self.key_bar: self.value_bar})
- # Returned value from get_multi should be a list of the values of the
- # keys
- self.assertEqual(expected, kvs.get_multi(keys))
- # Delete both keys
- kvs.delete_multi(keys)
- # make sure that NotFound is properly raised when trying to get the now
- # deleted keys
- self.assertRaises(exception.NotFound, kvs.get_multi, keys=keys)
- self.assertRaises(exception.NotFound, kvs.get, key=self.key_foo)
- self.assertRaises(exception.NotFound, kvs.get, key=self.key_bar)
- # Make sure get_multi raises NotFound if one of the keys isn't found
- kvs.set(self.key_foo, self.value_foo)
- self.assertRaises(exception.NotFound, kvs.get_multi, keys=keys)
-
- def test_kvs_multi_get_set_delete(self):
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.Memory')
-
- self._kvs_multi_get_set_delete(kvs)
-
- def test_kvs_locking_context_handler(self):
- # Make sure we're creating the correct key/value pairs for the backend
- # distributed locking mutex.
- self.config_fixture.config(group='kvs', enable_key_mangler=False)
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.KVSBackendFixture')
-
- lock_key = '_lock' + self.key_foo
- self.assertNotIn(lock_key, kvs._region.backend._db)
- with core.KeyValueStoreLock(kvs._mutex(self.key_foo), self.key_foo):
- self.assertIn(lock_key, kvs._region.backend._db)
- self.assertIs(kvs._region.backend._db[lock_key], 1)
-
- self.assertNotIn(lock_key, kvs._region.backend._db)
-
- def test_kvs_locking_context_handler_locking_disabled(self):
- # Make sure no creation of key/value pairs for the backend
- # distributed locking mutex occurs if locking is disabled.
- self.config_fixture.config(group='kvs', enable_key_mangler=False)
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.KVSBackendFixture', locking=False)
- lock_key = '_lock' + self.key_foo
- self.assertNotIn(lock_key, kvs._region.backend._db)
- with core.KeyValueStoreLock(kvs._mutex(self.key_foo), self.key_foo,
- False):
- self.assertNotIn(lock_key, kvs._region.backend._db)
-
- self.assertNotIn(lock_key, kvs._region.backend._db)
-
- def test_kvs_with_lock_action_context_manager_timeout(self):
- kvs = self._get_kvs_region()
- lock_timeout = 5
- kvs.configure('openstack.kvs.Memory', lock_timeout=lock_timeout)
-
- def do_with_lock_action_timeout(kvs_region, key, offset):
- with kvs_region.get_lock(key) as lock_in_use:
- self.assertTrue(lock_in_use.active)
- # Subtract the offset from the acquire_time. If this puts the
- # acquire_time difference from time.time() at >= lock_timeout
- # this should raise a LockTimeout exception. This is because
- # there is a built-in 1-second overlap where the context
- # manager thinks the lock is expired but the lock is still
- # active. This is to help mitigate race conditions on the
- # time-check itself.
- lock_in_use.acquire_time -= offset
- with kvs_region._action_with_lock(key, lock_in_use):
- pass
-
- # This should succeed, we are not timed-out here.
- do_with_lock_action_timeout(kvs, key=uuid.uuid4().hex, offset=2)
- # Try it now with an offset equal to the lock_timeout
- self.assertRaises(core.LockTimeout,
- do_with_lock_action_timeout,
- kvs_region=kvs,
- key=uuid.uuid4().hex,
- offset=lock_timeout)
- # Final test with offset significantly greater than the lock_timeout
- self.assertRaises(core.LockTimeout,
- do_with_lock_action_timeout,
- kvs_region=kvs,
- key=uuid.uuid4().hex,
- offset=100)
-
- def test_kvs_with_lock_action_mismatched_keys(self):
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.Memory')
-
- def do_with_lock_action(kvs_region, lock_key, target_key):
- with kvs_region.get_lock(lock_key) as lock_in_use:
- self.assertTrue(lock_in_use.active)
- with kvs_region._action_with_lock(target_key, lock_in_use):
- pass
-
- # Ensure we raise a ValueError if the lock key mismatches from the
- # target key.
- self.assertRaises(ValueError,
- do_with_lock_action,
- kvs_region=kvs,
- lock_key=self.key_foo,
- target_key=self.key_bar)
-
- def test_kvs_with_lock_action_context_manager(self):
- # Make sure we're creating the correct key/value pairs for the backend
- # distributed locking mutex.
- self.config_fixture.config(group='kvs', enable_key_mangler=False)
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.KVSBackendFixture')
-
- lock_key = '_lock' + self.key_foo
- self.assertNotIn(lock_key, kvs._region.backend._db)
- with kvs.get_lock(self.key_foo) as lock:
- with kvs._action_with_lock(self.key_foo, lock):
- self.assertTrue(lock.active)
- self.assertIn(lock_key, kvs._region.backend._db)
- self.assertIs(kvs._region.backend._db[lock_key], 1)
-
- self.assertNotIn(lock_key, kvs._region.backend._db)
-
- def test_kvs_with_lock_action_context_manager_no_lock(self):
- # Make sure we're not locking unless an actual lock is passed into the
- # context manager
- self.config_fixture.config(group='kvs', enable_key_mangler=False)
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.KVSBackendFixture')
-
- lock_key = '_lock' + self.key_foo
- lock = None
- self.assertNotIn(lock_key, kvs._region.backend._db)
- with kvs._action_with_lock(self.key_foo, lock):
- self.assertNotIn(lock_key, kvs._region.backend._db)
-
- self.assertNotIn(lock_key, kvs._region.backend._db)
-
- def test_kvs_backend_registration_does_not_reregister_backends(self):
- # SetUp registers the test backends. Running this again would raise an
- # exception if re-registration of the backends occurred.
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.Memory')
- core._register_backends()
-
- def test_kvs_memcached_manager_valid_dogpile_memcached_backend(self):
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.Memcached',
- memcached_backend='TestDriver')
- self.assertIsInstance(kvs._region.backend.driver,
- TestMemcacheDriver)
-
- def test_kvs_memcached_manager_invalid_dogpile_memcached_backend(self):
- # Invalid dogpile memcache backend should raise ValueError
- kvs = self._get_kvs_region()
- self.assertRaises(ValueError,
- kvs.configure,
- backing_store='openstack.kvs.Memcached',
- memcached_backend=uuid.uuid4().hex)
-
- def test_kvs_memcache_manager_no_expiry_keys(self):
- # Make sure the memcache backend recalculates the no-expiry keys
- # correctly when a key-mangler is set on it.
-
- def new_mangler(key):
- return '_mangled_key_' + key
-
- kvs = self._get_kvs_region()
- no_expiry_keys = set(['test_key'])
- kvs.configure('openstack.kvs.Memcached',
- memcached_backend='TestDriver',
- no_expiry_keys=no_expiry_keys)
- calculated_keys = set([kvs._region.key_mangler(key)
- for key in no_expiry_keys])
- self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key)
- self.assertSetEqual(calculated_keys,
- kvs._region.backend.no_expiry_hashed_keys)
- self.assertSetEqual(no_expiry_keys,
- kvs._region.backend.raw_no_expiry_keys)
- calculated_keys = set([new_mangler(key) for key in no_expiry_keys])
- kvs._region.backend.key_mangler = new_mangler
- self.assertSetEqual(calculated_keys,
- kvs._region.backend.no_expiry_hashed_keys)
- self.assertSetEqual(no_expiry_keys,
- kvs._region.backend.raw_no_expiry_keys)
-
- def test_kvs_memcache_key_mangler_set_to_none(self):
- kvs = self._get_kvs_region()
- no_expiry_keys = set(['test_key'])
- kvs.configure('openstack.kvs.Memcached',
- memcached_backend='TestDriver',
- no_expiry_keys=no_expiry_keys)
- self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key)
- kvs._region.backend.key_mangler = None
- self.assertSetEqual(kvs._region.backend.raw_no_expiry_keys,
- kvs._region.backend.no_expiry_hashed_keys)
- self.assertIsNone(kvs._region.backend.key_mangler)
-
- def test_noncallable_key_mangler_set_on_driver_raises_type_error(self):
- kvs = self._get_kvs_region()
- kvs.configure('openstack.kvs.Memcached',
- memcached_backend='TestDriver')
- self.assertRaises(TypeError,
- setattr,
- kvs._region.backend,
- 'key_mangler',
- 'Non-Callable')
-
- def test_kvs_memcache_set_arguments_and_memcache_expires_ttl(self):
- # Test the "set_arguments" (arguments passed on all set calls) logic
- # and the no-expiry-key modifications of set_arguments for the explicit
- # memcache TTL.
- self.config_fixture.config(group='kvs', enable_key_mangler=False)
- kvs = self._get_kvs_region()
- memcache_expire_time = 86400
-
- expected_set_args = {'time': memcache_expire_time}
- expected_no_expiry_args = {}
-
- expected_foo_keys = [self.key_foo]
- expected_bar_keys = [self.key_bar]
-
- mapping_foo = {self.key_foo: self.value_foo}
- mapping_bar = {self.key_bar: self.value_bar}
-
- kvs.configure(backing_store='openstack.kvs.Memcached',
- memcached_backend='TestDriver',
- memcached_expire_time=memcache_expire_time,
- some_other_arg=uuid.uuid4().hex,
- no_expiry_keys=[self.key_bar])
- kvs_driver = kvs._region.backend.driver
-
- # Ensure the set_arguments are correct
- self.assertDictEqual(
- expected_set_args,
- kvs._region.backend._get_set_arguments_driver_attr())
-
- # Set a key that would have an expiry and verify the correct result
- # occurred and that the correct set_arguments were passed.
- kvs.set(self.key_foo, self.value_foo)
- self.assertDictEqual(
- expected_set_args,
- kvs._region.backend.driver.client.set_arguments_passed)
- observed_foo_keys = list(kvs_driver.client.keys_values.keys())
- self.assertEqual(expected_foo_keys, observed_foo_keys)
- self.assertEqual(
- self.value_foo,
- kvs._region.backend.driver.client.keys_values[self.key_foo][0])
-
- # Set a key that would not have an expiry and verify the correct result
- # occurred and that the correct set_arguments were passed.
- kvs.set(self.key_bar, self.value_bar)
- self.assertDictEqual(
- expected_no_expiry_args,
- kvs._region.backend.driver.client.set_arguments_passed)
- observed_bar_keys = list(kvs_driver.client.keys_values.keys())
- self.assertEqual(expected_bar_keys, observed_bar_keys)
- self.assertEqual(
- self.value_bar,
- kvs._region.backend.driver.client.keys_values[self.key_bar][0])
-
- # set_multi a dict that would have an expiry and verify the correct
- # result occurred and that the correct set_arguments were passed.
- kvs.set_multi(mapping_foo)
- self.assertDictEqual(
- expected_set_args,
- kvs._region.backend.driver.client.set_arguments_passed)
- observed_foo_keys = list(kvs_driver.client.keys_values.keys())
- self.assertEqual(expected_foo_keys, observed_foo_keys)
- self.assertEqual(
- self.value_foo,
- kvs._region.backend.driver.client.keys_values[self.key_foo][0])
-
- # set_multi a dict that would not have an expiry and verify the correct
- # result occurred and that the correct set_arguments were passed.
- kvs.set_multi(mapping_bar)
- self.assertDictEqual(
- expected_no_expiry_args,
- kvs._region.backend.driver.client.set_arguments_passed)
- observed_bar_keys = list(kvs_driver.client.keys_values.keys())
- self.assertEqual(expected_bar_keys, observed_bar_keys)
- self.assertEqual(
- self.value_bar,
- kvs._region.backend.driver.client.keys_values[self.key_bar][0])
-
- def test_memcached_lock_max_lock_attempts(self):
- kvs = self._get_kvs_region()
- max_lock_attempts = 1
- test_key = uuid.uuid4().hex
-
- kvs.configure(backing_store='openstack.kvs.Memcached',
- memcached_backend='TestDriver',
- max_lock_attempts=max_lock_attempts)
-
- self.assertEqual(max_lock_attempts,
- kvs._region.backend.max_lock_attempts)
- # Simple Lock success test
- with kvs.get_lock(test_key) as lock:
- kvs.set(test_key, 'testing', lock)
-
- def lock_within_a_lock(key):
- with kvs.get_lock(key) as first_lock:
- kvs.set(test_key, 'lock', first_lock)
- with kvs.get_lock(key) as second_lock:
- kvs.set(key, 'lock-within-a-lock', second_lock)
-
- self.assertRaises(exception.UnexpectedError,
- lock_within_a_lock,
- key=test_key)
-
-
-class TestMemcachedBackend(unit.TestCase):
-
- @mock.patch('keystone.common.kvs.backends.memcached._', six.text_type)
- def test_invalid_backend_fails_initialization(self):
- raises_valueerror = matchers.Raises(matchers.MatchesException(
- ValueError, r'.*FakeBackend.*'))
-
- options = {
- 'url': 'needed to get to the focus of this test (the backend)',
- 'memcached_backend': 'FakeBackend',
- }
- self.assertThat(lambda: memcached.MemcachedBackend(options),
- raises_valueerror)
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
deleted file mode 100644
index 4bce6a73..00000000
--- a/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
+++ /dev/null
@@ -1,217 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import subprocess
-import uuid
-
-import ldap.modlist
-from oslo_config import cfg
-from six.moves import range
-
-from keystone import exception
-from keystone.identity.backends import ldap as identity_ldap
-from keystone.tests import unit
-from keystone.tests.unit import test_backend_ldap
-
-
-CONF = cfg.CONF
-
-
-def create_object(dn, attrs):
- conn = ldap.initialize(CONF.ldap.url)
- conn.simple_bind_s(CONF.ldap.user, CONF.ldap.password)
- ldif = ldap.modlist.addModlist(attrs)
- conn.add_s(dn, ldif)
- conn.unbind_s()
-
-
-class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
-
- def setUp(self):
- self._ldap_skip_live()
- super(LiveLDAPIdentity, self).setUp()
-
- def _ldap_skip_live(self):
- self.skip_if_env_not_set('ENABLE_LDAP_LIVE_TEST')
-
- def clear_database(self):
- devnull = open('/dev/null', 'w')
- subprocess.call(['ldapdelete',
- '-x',
- '-D', CONF.ldap.user,
- '-H', CONF.ldap.url,
- '-w', CONF.ldap.password,
- '-r', CONF.ldap.suffix],
- stderr=devnull)
-
- if CONF.ldap.suffix.startswith('ou='):
- tree_dn_attrs = {'objectclass': 'organizationalUnit',
- 'ou': 'openstack'}
- else:
- tree_dn_attrs = {'objectclass': ['dcObject', 'organizationalUnit'],
- 'dc': 'openstack',
- 'ou': 'openstack'}
- create_object(CONF.ldap.suffix, tree_dn_attrs)
- create_object(CONF.ldap.user_tree_dn,
- {'objectclass': 'organizationalUnit',
- 'ou': 'Users'})
- create_object(CONF.ldap.role_tree_dn,
- {'objectclass': 'organizationalUnit',
- 'ou': 'Roles'})
- create_object(CONF.ldap.group_tree_dn,
- {'objectclass': 'organizationalUnit',
- 'ou': 'UserGroups'})
-
- def config_files(self):
- config_files = super(LiveLDAPIdentity, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_liveldap.conf'))
- return config_files
-
- def test_build_tree(self):
- """Regression test for building the tree names."""
- # logic is different from the fake backend.
- user_api = identity_ldap.UserApi(CONF)
- self.assertTrue(user_api)
- self.assertEqual(user_api.tree_dn, CONF.ldap.user_tree_dn)
-
- def test_ldap_dereferencing(self):
- alt_users_ldif = {'objectclass': ['top', 'organizationalUnit'],
- 'ou': 'alt_users'}
- alt_fake_user_ldif = {'objectclass': ['person', 'inetOrgPerson'],
- 'cn': 'alt_fake1',
- 'sn': 'alt_fake1'}
- aliased_users_ldif = {'objectclass': ['alias', 'extensibleObject'],
- 'aliasedobjectname': "ou=alt_users,%s" %
- CONF.ldap.suffix}
- create_object("ou=alt_users,%s" % CONF.ldap.suffix, alt_users_ldif)
- create_object("%s=alt_fake1,ou=alt_users,%s" %
- (CONF.ldap.user_id_attribute, CONF.ldap.suffix),
- alt_fake_user_ldif)
- create_object("ou=alt_users,%s" % CONF.ldap.user_tree_dn,
- aliased_users_ldif)
-
- self.config_fixture.config(group='ldap',
- query_scope='sub',
- alias_dereferencing='never')
- self.identity_api = identity_ldap.Identity()
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- 'alt_fake1')
-
- self.config_fixture.config(group='ldap',
- alias_dereferencing='searching')
- self.identity_api = identity_ldap.Identity()
- user_ref = self.identity_api.get_user('alt_fake1')
- self.assertEqual('alt_fake1', user_ref['id'])
-
- self.config_fixture.config(group='ldap', alias_dereferencing='always')
- self.identity_api = identity_ldap.Identity()
- user_ref = self.identity_api.get_user('alt_fake1')
- self.assertEqual('alt_fake1', user_ref['id'])
-
- # FakeLDAP does not correctly process filters, so this test can only be
- # run against a live LDAP server
- def test_list_groups_for_user_filtered(self):
- domain = self._get_domain_fixture()
- test_groups = []
- test_users = []
- GROUP_COUNT = 3
- USER_COUNT = 2
-
- for x in range(0, USER_COUNT):
- # TODO(shaleh): use unit.new_user_ref()
- new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': domain['id']}
- new_user = self.identity_api.create_user(new_user)
- test_users.append(new_user)
- positive_user = test_users[0]
- negative_user = test_users[1]
-
- for x in range(0, USER_COUNT):
- group_refs = self.identity_api.list_groups_for_user(
- test_users[x]['id'])
- self.assertEqual(0, len(group_refs))
-
- for x in range(0, GROUP_COUNT):
- new_group = unit.new_group_ref(domain_id=domain['id'])
- new_group = self.identity_api.create_group(new_group)
- test_groups.append(new_group)
-
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(x, len(group_refs))
-
- self.identity_api.add_user_to_group(
- positive_user['id'],
- new_group['id'])
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(x + 1, len(group_refs))
-
- group_refs = self.identity_api.list_groups_for_user(
- negative_user['id'])
- self.assertEqual(0, len(group_refs))
-
- driver = self.identity_api._select_identity_driver(
- CONF.identity.default_domain_id)
- driver.group.ldap_filter = '(dn=xx)'
-
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(0, len(group_refs))
- group_refs = self.identity_api.list_groups_for_user(
- negative_user['id'])
- self.assertEqual(0, len(group_refs))
-
- driver.group.ldap_filter = '(objectclass=*)'
-
- group_refs = self.identity_api.list_groups_for_user(
- positive_user['id'])
- self.assertEqual(GROUP_COUNT, len(group_refs))
- group_refs = self.identity_api.list_groups_for_user(
- negative_user['id'])
- self.assertEqual(0, len(group_refs))
-
- def test_user_enable_attribute_mask(self):
- self.config_fixture.config(
- group='ldap',
- user_enabled_emulation=False,
- user_enabled_attribute='employeeType')
- super(LiveLDAPIdentity, self).test_user_enable_attribute_mask()
-
- def test_create_project_case_sensitivity(self):
- # The attribute used for the live LDAP tests is case insensitive.
-
- def call_super():
- (super(LiveLDAPIdentity, self).
- test_create_project_case_sensitivity())
-
- self.assertRaises(exception.Conflict, call_super)
-
- def test_create_user_case_sensitivity(self):
- # The attribute used for the live LDAP tests is case insensitive.
-
- def call_super():
- super(LiveLDAPIdentity, self).test_create_user_case_sensitivity()
-
- self.assertRaises(exception.Conflict, call_super)
-
- def test_project_update_missing_attrs_with_a_falsey_value(self):
- # The description attribute doesn't allow an empty value.
-
- def call_super():
- (super(LiveLDAPIdentity, self).
- test_project_update_missing_attrs_with_a_falsey_value())
-
- self.assertRaises(ldap.INVALID_SYNTAX, call_super)
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
deleted file mode 100644
index a284114a..00000000
--- a/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-import ldappool
-from oslo_config import cfg
-
-from keystone.common.ldap import core as ldap_core
-from keystone.identity.backends import ldap
-from keystone.tests import unit
-from keystone.tests.unit import fakeldap
-from keystone.tests.unit import test_backend_ldap_pool
-from keystone.tests.unit import test_ldap_livetest
-
-
-CONF = cfg.CONF
-
-
-class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin,
- test_ldap_livetest.LiveLDAPIdentity):
- """Executes existing LDAP live test with pooled LDAP handler.
-
- Also executes common pool specific tests via Mixin class.
-
- """
-
- def setUp(self):
- super(LiveLDAPPoolIdentity, self).setUp()
- self.addCleanup(self.cleanup_pools)
- # storing to local variable to avoid long references
- self.conn_pools = ldap_core.PooledLDAPHandler.connection_pools
-
- def config_files(self):
- config_files = super(LiveLDAPPoolIdentity, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_pool_liveldap.conf'))
- return config_files
-
- def test_assert_connector_used_not_fake_ldap_pool(self):
- handler = ldap_core._get_connection(CONF.ldap.url, use_pool=True)
- self.assertNotEqual(type(handler.Connector),
- type(fakeldap.FakeLdapPool))
- self.assertEqual(type(ldappool.StateConnector),
- type(handler.Connector))
-
- def test_async_search_and_result3(self):
- self.config_fixture.config(group='ldap', page_size=1)
- self.test_user_enable_attribute_mask()
-
- def test_pool_size_expands_correctly(self):
-
- who = CONF.ldap.user
- cred = CONF.ldap.password
- # get related connection manager instance
- ldappool_cm = self.conn_pools[CONF.ldap.url]
-
- def _get_conn():
- return ldappool_cm.connection(who, cred)
-
- with _get_conn() as c1: # 1
- self.assertEqual(1, len(ldappool_cm))
- self.assertTrue(c1.connected, True)
- self.assertTrue(c1.active, True)
- with _get_conn() as c2: # conn2
- self.assertEqual(2, len(ldappool_cm))
- self.assertTrue(c2.connected)
- self.assertTrue(c2.active)
-
- self.assertEqual(2, len(ldappool_cm))
- # c2 went out of context, its connected but not active
- self.assertTrue(c2.connected)
- self.assertFalse(c2.active)
- with _get_conn() as c3: # conn3
- self.assertEqual(2, len(ldappool_cm))
- self.assertTrue(c3.connected)
- self.assertTrue(c3.active)
- self.assertTrue(c3 is c2) # same connection is reused
- self.assertTrue(c2.active)
- with _get_conn() as c4: # conn4
- self.assertEqual(3, len(ldappool_cm))
- self.assertTrue(c4.connected)
- self.assertTrue(c4.active)
-
- def test_password_change_with_auth_pool_disabled(self):
- self.config_fixture.config(group='ldap', use_auth_pool=False)
- old_password = self.user_sna['password']
-
- self.test_password_change_with_pool()
-
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={},
- user_id=self.user_sna['id'],
- password=old_password)
-
- def _create_user_and_authenticate(self, password):
- # TODO(shaleh): port to new_user_ref()
- user_dict = {
- 'domain_id': CONF.identity.default_domain_id,
- 'name': uuid.uuid4().hex,
- 'password': password}
- user = self.identity_api.create_user(user_dict)
-
- self.identity_api.authenticate(
- context={},
- user_id=user['id'],
- password=password)
-
- return self.identity_api.get_user(user['id'])
-
- def _get_auth_conn_pool_cm(self):
- pool_url = ldap_core.PooledLDAPHandler.auth_pool_prefix + CONF.ldap.url
- return self.conn_pools[pool_url]
-
- def _do_password_change_for_one_user(self, password, new_password):
- self.config_fixture.config(group='ldap', use_auth_pool=True)
- self.cleanup_pools()
- self.load_backends()
-
- user1 = self._create_user_and_authenticate(password)
- auth_cm = self._get_auth_conn_pool_cm()
- self.assertEqual(1, len(auth_cm))
- user2 = self._create_user_and_authenticate(password)
- self.assertEqual(1, len(auth_cm))
- user3 = self._create_user_and_authenticate(password)
- self.assertEqual(1, len(auth_cm))
- user4 = self._create_user_and_authenticate(password)
- self.assertEqual(1, len(auth_cm))
- user5 = self._create_user_and_authenticate(password)
- self.assertEqual(1, len(auth_cm))
-
- # connection pool size remains 1 even for different user ldap bind
- # as there is only one active connection at a time
-
- user_api = ldap.UserApi(CONF)
- u1_dn = user_api._id_to_dn_string(user1['id'])
- u2_dn = user_api._id_to_dn_string(user2['id'])
- u3_dn = user_api._id_to_dn_string(user3['id'])
- u4_dn = user_api._id_to_dn_string(user4['id'])
- u5_dn = user_api._id_to_dn_string(user5['id'])
-
- # now create multiple active connections for end user auth case which
- # will force to keep them in pool. After that, modify one of user
- # password. Need to make sure that user connection is in middle
- # of pool list.
- auth_cm = self._get_auth_conn_pool_cm()
- with auth_cm.connection(u1_dn, password) as _:
- with auth_cm.connection(u2_dn, password) as _:
- with auth_cm.connection(u3_dn, password) as _:
- with auth_cm.connection(u4_dn, password) as _:
- with auth_cm.connection(u5_dn, password) as _:
- self.assertEqual(5, len(auth_cm))
- _.unbind_s()
-
- user3['password'] = new_password
- self.identity_api.update_user(user3['id'], user3)
-
- return user3
-
- def test_password_change_with_auth_pool_enabled_long_lifetime(self):
- self.config_fixture.config(group='ldap',
- auth_pool_connection_lifetime=600)
- old_password = 'my_password'
- new_password = 'new_password'
- user = self._do_password_change_for_one_user(old_password,
- new_password)
- user.pop('password')
-
- # with long connection lifetime auth_pool can bind to old password
- # successfully which is not desired if password change is frequent
- # use case in a deployment.
- # This can happen in multiple concurrent connections case only.
- user_ref = self.identity_api.authenticate(
- context={}, user_id=user['id'], password=old_password)
-
- self.assertDictEqual(user, user_ref)
-
- def test_password_change_with_auth_pool_enabled_no_lifetime(self):
- self.config_fixture.config(group='ldap',
- auth_pool_connection_lifetime=0)
-
- old_password = 'my_password'
- new_password = 'new_password'
- user = self._do_password_change_for_one_user(old_password,
- new_password)
- # now as connection lifetime is zero, so authentication
- # with old password will always fail.
- self.assertRaises(AssertionError,
- self.identity_api.authenticate,
- context={}, user_id=user['id'],
- password=old_password)
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
deleted file mode 100644
index 98e2882d..00000000
--- a/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-# Copyright 2013 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import ldap.modlist
-from oslo_config import cfg
-
-from keystone import exception
-from keystone import identity
-from keystone.tests import unit
-from keystone.tests.unit import test_ldap_livetest
-
-
-CONF = cfg.CONF
-
-
-def create_object(dn, attrs):
- conn = ldap.initialize(CONF.ldap.url)
- conn.simple_bind_s(CONF.ldap.user, CONF.ldap.password)
- ldif = ldap.modlist.addModlist(attrs)
- conn.add_s(dn, ldif)
- conn.unbind_s()
-
-
-class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity):
-
- def _ldap_skip_live(self):
- self.skip_if_env_not_set('ENABLE_TLS_LDAP_LIVE_TEST')
-
- def config_files(self):
- config_files = super(LiveTLSLDAPIdentity, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_tls_liveldap.conf'))
- return config_files
-
- def test_tls_certfile_demand_option(self):
- self.config_fixture.config(group='ldap',
- use_tls=True,
- tls_cacertdir=None,
- tls_req_cert='demand')
- self.identity_api = identity.backends.ldap.Identity()
-
- # TODO(shaleh): use new_user_ref()
- user = {'name': 'fake1',
- 'password': 'fakepass1',
- 'tenants': ['bar']}
- user = self.identity_api.create_user('user')
- user_ref = self.identity_api.get_user(user['id'])
- self.assertEqual(user['id'], user_ref['id'])
-
- user['password'] = 'fakepass2'
- self.identity_api.update_user(user['id'], user)
-
- self.identity_api.delete_user(user['id'])
- self.assertRaises(exception.UserNotFound, self.identity_api.get_user,
- user['id'])
-
- def test_tls_certdir_demand_option(self):
- self.config_fixture.config(group='ldap',
- use_tls=True,
- tls_cacertdir=None,
- tls_req_cert='demand')
- self.identity_api = identity.backends.ldap.Identity()
-
- # TODO(shaleh): use new_user_ref()
- user = {'id': 'fake1',
- 'name': 'fake1',
- 'password': 'fakepass1',
- 'tenants': ['bar']}
- self.identity_api.create_user('fake1', user)
- user_ref = self.identity_api.get_user('fake1')
- self.assertEqual('fake1', user_ref['id'])
-
- user['password'] = 'fakepass2'
- self.identity_api.update_user('fake1', user)
-
- self.identity_api.delete_user('fake1')
- self.assertRaises(exception.UserNotFound, self.identity_api.get_user,
- 'fake1')
-
- def test_tls_bad_certfile(self):
- self.config_fixture.config(
- group='ldap',
- use_tls=True,
- tls_req_cert='demand',
- tls_cacertfile='/etc/keystone/ssl/certs/mythicalcert.pem',
- tls_cacertdir=None)
- self.identity_api = identity.backends.ldap.Identity()
-
- # TODO(shaleh): use new_user_ref()
- user = {'name': 'fake1',
- 'password': 'fakepass1',
- 'tenants': ['bar']}
- self.assertRaises(IOError, self.identity_api.create_user, user)
-
- def test_tls_bad_certdir(self):
- self.config_fixture.config(
- group='ldap',
- use_tls=True,
- tls_cacertfile=None,
- tls_req_cert='demand',
- tls_cacertdir='/etc/keystone/ssl/mythicalcertdir')
- self.identity_api = identity.backends.ldap.Identity()
-
- # TODO(shaleh): use new_user_ref()
- user = {'name': 'fake1',
- 'password': 'fakepass1',
- 'tenants': ['bar']}
- self.assertRaises(IOError, self.identity_api.create_user, user)
diff --git a/keystone-moon/keystone/tests/unit/test_middleware.py b/keystone-moon/keystone/tests/unit/test_middleware.py
deleted file mode 100644
index d33e8c00..00000000
--- a/keystone-moon/keystone/tests/unit/test_middleware.py
+++ /dev/null
@@ -1,764 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import hashlib
-import uuid
-
-from oslo_config import cfg
-from six.moves import http_client
-import webtest
-
-from keystone.common import authorization
-from keystone.common import tokenless_auth
-from keystone import exception
-from keystone.federation import constants as federation_constants
-from keystone import middleware
-from keystone.tests import unit
-from keystone.tests.unit import mapping_fixtures
-from keystone.tests.unit import test_backend_sql
-
-
-CONF = cfg.CONF
-
-
-class MiddlewareRequestTestBase(unit.TestCase):
-
- MIDDLEWARE_CLASS = None # override this in subclasses
-
- def _application(self):
- """A base wsgi application that returns a simple response."""
- def app(environ, start_response):
- # WSGI requires the body of the response to be six.binary_type
- body = uuid.uuid4().hex.encode('utf-8')
- resp_headers = [('Content-Type', 'text/html; charset=utf8'),
- ('Content-Length', str(len(body)))]
- start_response('200 OK', resp_headers)
- return [body]
-
- return app
-
- def _generate_app_response(self, app, headers=None, method='get',
- path='/', **kwargs):
- """Given a wsgi application wrap it in webtest and call it."""
- return getattr(webtest.TestApp(app), method)(path,
- headers=headers or {},
- **kwargs)
-
- def _middleware_failure(self, exc, *args, **kwargs):
- """Assert that an exception is being thrown from process_request."""
- # NOTE(jamielennox): This is a little ugly. We need to call the webtest
- # framework so that the correct RequestClass object is created for when
- # we call process_request. However because we go via webtest we only
- # see the response object and not the actual exception that is thrown
- # by process_request. To get around this we subclass process_request
- # with something that checks for the right type of exception being
- # thrown so we can test the middle of the request process.
- # TODO(jamielennox): Change these tests to test the value of the
- # response rather than the error that is raised.
-
- class _Failing(self.MIDDLEWARE_CLASS):
-
- _called = False
-
- def process_request(i_self, *i_args, **i_kwargs):
- # i_ to distinguish it from and not clobber the outer vars
- e = self.assertRaises(exc,
- super(_Failing, i_self).process_request,
- *i_args, **i_kwargs)
- i_self._called = True
- raise e
-
- # by default the returned status when an uncaught exception is raised
- # for validation or caught errors this will likely be 400
- kwargs.setdefault('status', http_client.INTERNAL_SERVER_ERROR) # 500
-
- app = _Failing(self._application())
- resp = self._generate_app_response(app, *args, **kwargs)
- self.assertTrue(app._called)
- return resp
-
- def _do_middleware_response(self, *args, **kwargs):
- """Wrap a middleware around a sample application and call it."""
- app = self.MIDDLEWARE_CLASS(self._application())
- return self._generate_app_response(app, *args, **kwargs)
-
- def _do_middleware_request(self, *args, **kwargs):
- """The request object from a successful middleware call."""
- return self._do_middleware_response(*args, **kwargs).request
-
-
-class TokenAuthMiddlewareTest(MiddlewareRequestTestBase):
-
- MIDDLEWARE_CLASS = middleware.TokenAuthMiddleware
-
- def test_request(self):
- headers = {middleware.AUTH_TOKEN_HEADER: 'MAGIC'}
- req = self._do_middleware_request(headers=headers)
- context = req.environ[middleware.CONTEXT_ENV]
- self.assertEqual('MAGIC', context['token_id'])
-
-
-class AdminTokenAuthMiddlewareTest(MiddlewareRequestTestBase):
-
- MIDDLEWARE_CLASS = middleware.AdminTokenAuthMiddleware
-
- def config_overrides(self):
- super(AdminTokenAuthMiddlewareTest, self).config_overrides()
- self.config_fixture.config(
- admin_token='ADMIN')
-
- def test_request_admin(self):
- headers = {middleware.AUTH_TOKEN_HEADER: 'ADMIN'}
- req = self._do_middleware_request(headers=headers)
- self.assertTrue(req.environ[middleware.CONTEXT_ENV]['is_admin'])
-
- def test_request_non_admin(self):
- headers = {middleware.AUTH_TOKEN_HEADER: 'NOT-ADMIN'}
- req = self._do_middleware_request(headers=headers)
- self.assertFalse(req.environ[middleware.CONTEXT_ENV]['is_admin'])
-
-
-class JsonBodyMiddlewareTest(MiddlewareRequestTestBase):
-
- MIDDLEWARE_CLASS = middleware.JsonBodyMiddleware
-
- def test_request_with_params(self):
- headers = {'Content-Type': 'application/json'}
- params = '{"arg1": "one", "arg2": ["a"]}'
- req = self._do_middleware_request(params=params,
- headers=headers,
- method='post')
- self.assertEqual({"arg1": "one", "arg2": ["a"]},
- req.environ[middleware.PARAMS_ENV])
-
- def test_malformed_json(self):
- headers = {'Content-Type': 'application/json'}
- self._do_middleware_response(params='{"arg1": "on',
- headers=headers,
- method='post',
- status=http_client.BAD_REQUEST)
-
- def test_not_dict_body(self):
- headers = {'Content-Type': 'application/json'}
- resp = self._do_middleware_response(params='42',
- headers=headers,
- method='post',
- status=http_client.BAD_REQUEST)
-
- self.assertIn('valid JSON object', resp.json['error']['message'])
-
- def test_no_content_type(self):
- headers = {'Content-Type': ''}
- params = '{"arg1": "one", "arg2": ["a"]}'
- req = self._do_middleware_request(params=params,
- headers=headers,
- method='post')
- self.assertEqual({"arg1": "one", "arg2": ["a"]},
- req.environ[middleware.PARAMS_ENV])
-
- def test_unrecognized_content_type(self):
- headers = {'Content-Type': 'text/plain'}
- self._do_middleware_response(params='{"arg1": "one", "arg2": ["a"]}',
- headers=headers,
- method='post',
- status=http_client.BAD_REQUEST)
-
- def test_unrecognized_content_type_without_body(self):
- headers = {'Content-Type': 'text/plain'}
- req = self._do_middleware_request(headers=headers)
- self.assertEqual({}, req.environ.get(middleware.PARAMS_ENV, {}))
-
-
-class AuthContextMiddlewareTest(test_backend_sql.SqlTests,
- MiddlewareRequestTestBase):
-
- MIDDLEWARE_CLASS = middleware.AuthContextMiddleware
-
- def setUp(self):
- super(AuthContextMiddlewareTest, self).setUp()
- self.client_issuer = uuid.uuid4().hex
- self.untrusted_client_issuer = uuid.uuid4().hex
- self.trusted_issuer = self.client_issuer
- self.config_fixture.config(group='tokenless_auth',
- trusted_issuer=[self.trusted_issuer])
-
- # client_issuer is encoded because you can't hash
- # unicode objects with hashlib.
- # This idp_id is calculated based on sha256(self.client_issuer)
- hashed_idp = hashlib.sha256(self.client_issuer.encode('utf-8'))
- self.idp_id = hashed_idp.hexdigest()
- self._load_sample_data()
-
- def _load_sample_data(self):
- self.protocol_id = 'x509'
-
- # 1) Create a domain for the user.
- self.domain = unit.new_domain_ref()
- self.domain_id = self.domain['id']
- self.domain_name = self.domain['name']
- self.resource_api.create_domain(self.domain_id, self.domain)
-
- # 2) Create a project for the user.
- self.project = unit.new_project_ref(domain_id=self.domain_id)
- self.project_id = self.project['id']
- self.project_name = self.project['name']
-
- self.resource_api.create_project(self.project_id, self.project)
-
- # 3) Create a user in new domain.
- self.user = unit.new_user_ref(domain_id=self.domain_id,
- project_id=self.project_id)
-
- self.user = self.identity_api.create_user(self.user)
-
- # Add IDP
- self.idp = self._idp_ref(id=self.idp_id)
- self.federation_api.create_idp(self.idp['id'],
- self.idp)
-
- # Add a role
- self.role = unit.new_role_ref()
- self.role_id = self.role['id']
- self.role_name = self.role['name']
- self.role_api.create_role(self.role_id, self.role)
-
- # Add a group
- self.group = unit.new_group_ref(domain_id=self.domain_id)
- self.group = self.identity_api.create_group(self.group)
-
- # Assign a role to the user on a project
- self.assignment_api.add_role_to_user_and_project(
- user_id=self.user['id'],
- tenant_id=self.project_id,
- role_id=self.role_id)
-
- # Assign a role to the group on a project
- self.assignment_api.create_grant(
- role_id=self.role_id,
- group_id=self.group['id'],
- project_id=self.project_id)
-
- def _load_mapping_rules(self, rules):
- # Add a mapping
- self.mapping = self._mapping_ref(rules=rules)
- self.federation_api.create_mapping(self.mapping['id'],
- self.mapping)
- # Add protocols
- self.proto_x509 = self._proto_ref(mapping_id=self.mapping['id'])
- self.proto_x509['id'] = self.protocol_id
- self.federation_api.create_protocol(self.idp['id'],
- self.proto_x509['id'],
- self.proto_x509)
-
- def _idp_ref(self, id=None):
- idp = {
- 'id': id or uuid.uuid4().hex,
- 'enabled': True,
- 'description': uuid.uuid4().hex
- }
- return idp
-
- def _proto_ref(self, mapping_id=None):
- proto = {
- 'id': uuid.uuid4().hex,
- 'mapping_id': mapping_id or uuid.uuid4().hex
- }
- return proto
-
- def _mapping_ref(self, rules=None):
- if rules is None:
- mapped_rules = {}
- else:
- mapped_rules = rules.get('rules', {})
- return {
- 'id': uuid.uuid4().hex,
- 'rules': mapped_rules
- }
-
- def _assert_tokenless_auth_context(self, context, ephemeral_user=False):
- self.assertIsNotNone(context)
- self.assertEqual(self.project_id, context['project_id'])
- self.assertIn(self.role_name, context['roles'])
- if ephemeral_user:
- self.assertEqual(self.group['id'], context['group_ids'][0])
- self.assertEqual('ephemeral',
- context[federation_constants.PROTOCOL])
- self.assertEqual(self.idp_id,
- context[federation_constants.IDENTITY_PROVIDER])
- else:
- self.assertEqual(self.user['id'], context['user_id'])
-
- def _create_context(self, request, mapping_ref=None,
- exception_expected=False):
- """Builds the auth context from the given arguments.
-
- auth context will be returned from the AuthContextMiddleware based on
- what is being passed in the given request and what mapping is being
- setup in the backend DB.
-
- :param request: HTTP request
- :param mapping_ref: A mapping in JSON structure will be setup in the
- backend DB for mapping a user or a group.
- :param exception_expected: Sets to True when an exception is expected
- to raised based on the given arguments.
- :returns: context an auth context contains user and role information
- :rtype: dict
- """
- if mapping_ref:
- self._load_mapping_rules(mapping_ref)
-
- if not exception_expected:
- (middleware.AuthContextMiddleware('Tokenless_auth_test').
- process_request(request))
- context = request.environ.get(authorization.AUTH_CONTEXT_ENV)
- else:
- context = middleware.AuthContextMiddleware('Tokenless_auth_test')
- return context
-
- def test_context_already_exists(self):
- stub_value = uuid.uuid4().hex
- env = {authorization.AUTH_CONTEXT_ENV: stub_value}
- req = self._do_middleware_request(extra_environ=env)
- self.assertEqual(stub_value,
- req.environ.get(authorization.AUTH_CONTEXT_ENV))
-
- def test_not_applicable_to_token_request(self):
- req = self._do_middleware_request(path='/auth/tokens', method='post')
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self.assertIsNone(context)
-
- def test_no_tokenless_attributes_request(self):
- req = self._do_middleware_request()
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self.assertIsNone(context)
-
- def test_no_issuer_attribute_request(self):
- env = {}
- env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self.assertIsNone(context)
-
- def test_has_only_issuer_and_project_name_request(self):
- env = {}
- # SSL_CLIENT_I_DN is the attribute name that wsgi env
- # references to issuer of the client certificate.
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex
- self._middleware_failure(exception.ValidationError,
- extra_environ=env,
- status=400)
-
- def test_has_only_issuer_and_project_domain_name_request(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex
- self._middleware_failure(exception.ValidationError,
- extra_environ=env,
- status=400)
-
- def test_has_only_issuer_and_project_domain_id_request(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_DOMAIN_ID'] = uuid.uuid4().hex
- self._middleware_failure(exception.ValidationError,
- extra_environ=env,
- status=400)
-
- def test_missing_both_domain_and_project_request(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- self._middleware_failure(exception.ValidationError,
- extra_environ=env,
- status=400)
-
- def test_empty_trusted_issuer_list(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex
-
- self.config_fixture.config(group='tokenless_auth',
- trusted_issuer=[])
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self.assertIsNone(context)
-
- def test_client_issuer_not_trusted(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.untrusted_client_issuer
- env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self.assertIsNone(context)
-
- def test_proj_scope_with_proj_id_and_proj_dom_id_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_ID'] = self.project_id
- env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
- # SSL_CLIENT_USER_NAME and SSL_CLIENT_DOMAIN_NAME are the types
- # defined in the mapping that will map to the user name and
- # domain name
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context)
-
- def test_proj_scope_with_proj_id_only_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_ID'] = self.project_id
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context)
-
- def test_proj_scope_with_proj_name_and_proj_dom_id_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context)
-
- def test_proj_scope_with_proj_name_and_proj_dom_name_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context)
-
- def test_proj_scope_with_proj_name_only_fail(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_id
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
-
- self._middleware_failure(exception.ValidationError,
- extra_environ=env,
- status=400)
-
- def test_mapping_with_userid_and_domainid_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_ID'] = self.user['id']
- env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINID)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context)
-
- def test_mapping_with_userid_and_domainname_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_ID'] = self.user['id']
- env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINNAME)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context)
-
- def test_mapping_with_username_and_domainid_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context)
-
- def test_only_domain_name_fail(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_ID'] = self.project_id
- env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
- env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_DOMAINNAME_ONLY)
-
- self._middleware_failure(exception.ValidationError,
- extra_environ=env,
- status=400)
-
- def test_only_domain_id_fail(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_ID'] = self.project_id
- env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
- env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_DOMAINID_ONLY)
-
- self._middleware_failure(exception.ValidationError,
- extra_environ=env,
- status=400)
-
- def test_missing_domain_data_fail(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_ID'] = self.project_id
- env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_ONLY)
-
- self._middleware_failure(exception.ValidationError,
- extra_environ=env,
- status=400)
-
- def test_userid_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_ID'] = self.project_id
- env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
- env['SSL_CLIENT_USER_ID'] = self.user['id']
-
- self._load_mapping_rules(mapping_fixtures.MAPPING_WITH_USERID_ONLY)
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context)
-
- def test_domain_disable_fail(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
-
- self.domain['enabled'] = False
- self.domain = self.resource_api.update_domain(
- self.domain['id'], self.domain)
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID)
- self._middleware_failure(exception.Unauthorized,
- extra_environ=env,
- status=401)
-
- def test_user_disable_fail(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
-
- self.user['enabled'] = False
- self.user = self.identity_api.update_user(self.user['id'], self.user)
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID)
-
- self._middleware_failure(AssertionError,
- extra_environ=env)
-
- def test_invalid_user_fail(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_ID'] = self.project_id
- env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
- env['SSL_CLIENT_USER_NAME'] = uuid.uuid4().hex
- env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
-
- self._load_mapping_rules(
- mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
-
- self._middleware_failure(exception.UserNotFound,
- extra_environ=env,
- status=404)
-
- def test_ephemeral_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- self.config_fixture.config(group='tokenless_auth',
- protocol='ephemeral')
- self.protocol_id = 'ephemeral'
- mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
- mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
- self._load_mapping_rules(mapping)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context, ephemeral_user=True)
-
- def test_ephemeral_with_default_user_type_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- self.config_fixture.config(group='tokenless_auth',
- protocol='ephemeral')
- self.protocol_id = 'ephemeral'
- # this mapping does not have the user type defined
- # and it should defaults to 'ephemeral' which is
- # the expected type for the test case.
- mapping = copy.deepcopy(
- mapping_fixtures.MAPPING_FOR_DEFAULT_EPHEMERAL_USER)
- mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
- self._load_mapping_rules(mapping)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context, ephemeral_user=True)
-
- def test_ephemeral_any_user_success(self):
- """Verify ephemeral user does not need a specified user.
-
- Keystone is not looking to match the user, but a corresponding group.
- """
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = uuid.uuid4().hex
- self.config_fixture.config(group='tokenless_auth',
- protocol='ephemeral')
- self.protocol_id = 'ephemeral'
- mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
- mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
- self._load_mapping_rules(mapping)
-
- req = self._do_middleware_request(extra_environ=env)
- context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self._assert_tokenless_auth_context(context, ephemeral_user=True)
-
- def test_ephemeral_invalid_scope_fail(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- self.config_fixture.config(group='tokenless_auth',
- protocol='ephemeral')
- self.protocol_id = 'ephemeral'
- mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
- mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
- self._load_mapping_rules(mapping)
-
- self._middleware_failure(exception.Unauthorized,
- extra_environ=env,
- status=401)
-
- def test_ephemeral_no_group_found_fail(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- self.config_fixture.config(group='tokenless_auth',
- protocol='ephemeral')
- self.protocol_id = 'ephemeral'
- mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
- mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex
- self._load_mapping_rules(mapping)
-
- self._middleware_failure(exception.MappedGroupNotFound,
- extra_environ=env)
-
- def test_ephemeral_incorrect_mapping_fail(self):
- """Test ephemeral user picking up the non-ephemeral user mapping.
-
- Looking up the mapping with protocol Id 'x509' will load up
- the non-ephemeral user mapping, results unauthenticated.
- """
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- env['HTTP_X_PROJECT_NAME'] = self.project_name
- env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user['name']
- # This will pick up the incorrect mapping
- self.config_fixture.config(group='tokenless_auth',
- protocol='x509')
- self.protocol_id = 'x509'
- mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
- mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex
- self._load_mapping_rules(mapping)
-
- self._middleware_failure(exception.MappedGroupNotFound,
- extra_environ=env)
-
- def test_create_idp_id_success(self):
- env = {}
- env['SSL_CLIENT_I_DN'] = self.client_issuer
- auth = tokenless_auth.TokenlessAuthHelper(env)
- idp_id = auth._build_idp_id()
- self.assertEqual(self.idp_id, idp_id)
-
- def test_create_idp_id_attri_not_found_fail(self):
- env = {}
- env[uuid.uuid4().hex] = self.client_issuer
- auth = tokenless_auth.TokenlessAuthHelper(env)
- expected_msg = ('Could not determine Identity Provider ID. The '
- 'configuration option %s was not found in the '
- 'request environment.' %
- CONF.tokenless_auth.issuer_attribute)
- # Check the content of the exception message as well
- self.assertRaisesRegexp(exception.TokenlessAuthConfigError,
- expected_msg,
- auth._build_idp_id)
diff --git a/keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py b/keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py
deleted file mode 100644
index bf60cff0..00000000
--- a/keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import os
-
-from six.moves import http_client
-import webtest
-
-from keystone.tests import unit
-
-
-class TestNoAdminTokenAuth(unit.TestCase):
- def setUp(self):
- super(TestNoAdminTokenAuth, self).setUp()
- self.load_backends()
-
- self._generate_paste_config()
-
- self.admin_app = webtest.TestApp(
- self.loadapp(unit.dirs.tmp('no_admin_token_auth'), name='admin'),
- extra_environ=dict(REMOTE_ADDR='127.0.0.1'))
- self.addCleanup(setattr, self, 'admin_app', None)
-
- def _generate_paste_config(self):
- # Generate a file, based on keystone-paste.ini, that doesn't include
- # admin_token_auth in the pipeline
-
- with open(unit.dirs.etc('keystone-paste.ini'), 'r') as f:
- contents = f.read()
-
- new_contents = contents.replace(' admin_token_auth ', ' ')
-
- filename = unit.dirs.tmp('no_admin_token_auth-paste.ini')
- with open(filename, 'w') as f:
- f.write(new_contents)
- self.addCleanup(os.remove, filename)
-
- def test_request_no_admin_token_auth(self):
- # This test verifies that if the admin_token_auth middleware isn't
- # in the paste pipeline that users can still make requests.
-
- # Note(blk-u): Picked /v2.0/tenants because it's an operation that
- # requires is_admin in the context, any operation that requires
- # is_admin would work for this test.
- REQ_PATH = '/v2.0/tenants'
-
- # If the following does not raise, then the test is successful.
- self.admin_app.get(REQ_PATH, headers={'X-Auth-Token': 'NotAdminToken'},
- status=http_client.UNAUTHORIZED)
diff --git a/keystone-moon/keystone/tests/unit/test_policy.py b/keystone-moon/keystone/tests/unit/test_policy.py
deleted file mode 100644
index d6e911e9..00000000
--- a/keystone-moon/keystone/tests/unit/test_policy.py
+++ /dev/null
@@ -1,222 +0,0 @@
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import json
-import os
-
-from oslo_policy import policy as common_policy
-import six
-from testtools import matchers
-
-from keystone import exception
-from keystone.policy.backends import rules
-from keystone.tests import unit
-from keystone.tests.unit import ksfixtures
-from keystone.tests.unit.ksfixtures import temporaryfile
-
-
-class PolicyFileTestCase(unit.TestCase):
- def setUp(self):
- # self.tmpfilename should exist before setUp super is called
- # this is to ensure it is available for the config_fixture in
- # the config_overrides call.
- self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
- self.tmpfilename = self.tempfile.file_name
- super(PolicyFileTestCase, self).setUp()
- self.target = {}
-
- def _policy_fixture(self):
- return ksfixtures.Policy(self.tmpfilename, self.config_fixture)
-
- def test_modified_policy_reloads(self):
- action = "example:test"
- empty_credentials = {}
- with open(self.tmpfilename, "w") as policyfile:
- policyfile.write("""{"example:test": []}""")
- rules.enforce(empty_credentials, action, self.target)
- with open(self.tmpfilename, "w") as policyfile:
- policyfile.write("""{"example:test": ["false:false"]}""")
- rules._ENFORCER.clear()
- self.assertRaises(exception.ForbiddenAction, rules.enforce,
- empty_credentials, action, self.target)
-
-
-class PolicyTestCase(unit.TestCase):
- def setUp(self):
- super(PolicyTestCase, self).setUp()
- self.rules = {
- "true": [],
- "example:allowed": [],
- "example:denied": [["false:false"]],
- "example:get_http": [["http:http://www.example.com"]],
- "example:my_file": [["role:compute_admin"],
- ["project_id:%(project_id)s"]],
- "example:early_and_fail": [["false:false", "rule:true"]],
- "example:early_or_success": [["rule:true"], ["false:false"]],
- "example:lowercase_admin": [["role:admin"], ["role:sysadmin"]],
- "example:uppercase_admin": [["role:ADMIN"], ["role:sysadmin"]],
- }
-
- # NOTE(vish): then overload underlying policy engine
- self._set_rules()
- self.credentials = {}
- self.target = {}
-
- def _set_rules(self):
- these_rules = common_policy.Rules.from_dict(self.rules)
- rules._ENFORCER.set_rules(these_rules)
-
- def test_enforce_nonexistent_action_throws(self):
- action = "example:noexist"
- self.assertRaises(exception.ForbiddenAction, rules.enforce,
- self.credentials, action, self.target)
-
- def test_enforce_bad_action_throws(self):
- action = "example:denied"
- self.assertRaises(exception.ForbiddenAction, rules.enforce,
- self.credentials, action, self.target)
-
- def test_enforce_good_action(self):
- action = "example:allowed"
- rules.enforce(self.credentials, action, self.target)
-
- def test_templatized_enforcement(self):
- target_mine = {'project_id': 'fake'}
- target_not_mine = {'project_id': 'another'}
- credentials = {'project_id': 'fake', 'roles': []}
- action = "example:my_file"
- rules.enforce(credentials, action, target_mine)
- self.assertRaises(exception.ForbiddenAction, rules.enforce,
- credentials, action, target_not_mine)
-
- def test_early_AND_enforcement(self):
- action = "example:early_and_fail"
- self.assertRaises(exception.ForbiddenAction, rules.enforce,
- self.credentials, action, self.target)
-
- def test_early_OR_enforcement(self):
- action = "example:early_or_success"
- rules.enforce(self.credentials, action, self.target)
-
- def test_ignore_case_role_check(self):
- lowercase_action = "example:lowercase_admin"
- uppercase_action = "example:uppercase_admin"
- # NOTE(dprince): We mix case in the Admin role here to ensure
- # case is ignored
- admin_credentials = {'roles': ['AdMiN']}
- rules.enforce(admin_credentials, lowercase_action, self.target)
- rules.enforce(admin_credentials, uppercase_action, self.target)
-
-
-class DefaultPolicyTestCase(unit.TestCase):
- def setUp(self):
- super(DefaultPolicyTestCase, self).setUp()
-
- self.rules = {
- "default": [],
- "example:exist": [["false:false"]]
- }
- self._set_rules('default')
- self.credentials = {}
-
- # FIXME(gyee): latest Oslo policy Enforcer class reloads the rules in
- # its enforce() method even though rules has been initialized via
- # set_rules(). To make it easier to do our tests, we're going to
- # monkeypatch load_roles() so it does nothing. This seem like a bug in
- # Oslo policy as we shouldn't have to reload the rules if they have
- # already been set using set_rules().
- self._old_load_rules = rules._ENFORCER.load_rules
- self.addCleanup(setattr, rules._ENFORCER, 'load_rules',
- self._old_load_rules)
- rules._ENFORCER.load_rules = lambda *args, **kwargs: None
-
- def _set_rules(self, default_rule):
- these_rules = common_policy.Rules.from_dict(self.rules, default_rule)
- rules._ENFORCER.set_rules(these_rules)
-
- def test_policy_called(self):
- self.assertRaises(exception.ForbiddenAction, rules.enforce,
- self.credentials, "example:exist", {})
-
- def test_not_found_policy_calls_default(self):
- rules.enforce(self.credentials, "example:noexist", {})
-
- def test_default_not_found(self):
- new_default_rule = "default_noexist"
- # FIXME(gyee): need to overwrite the Enforcer's default_rule first
- # as it is recreating the rules with its own default_rule instead
- # of the default_rule passed in from set_rules(). I think this is a
- # bug in Oslo policy.
- rules._ENFORCER.default_rule = new_default_rule
- self._set_rules(new_default_rule)
- self.assertRaises(exception.ForbiddenAction, rules.enforce,
- self.credentials, "example:noexist", {})
-
-
-class PolicyJsonTestCase(unit.TestCase):
-
- def _load_entries(self, filename):
- return set(json.load(open(filename)))
-
- def test_json_examples_have_matching_entries(self):
- policy_keys = self._load_entries(unit.dirs.etc('policy.json'))
- cloud_policy_keys = self._load_entries(
- unit.dirs.etc('policy.v3cloudsample.json'))
-
- policy_extra_keys = ['admin_or_token_subject',
- 'service_admin_or_token_subject',
- 'token_subject', ]
- expected_policy_keys = list(cloud_policy_keys) + policy_extra_keys
- diffs = set(policy_keys).difference(set(expected_policy_keys))
-
- self.assertThat(diffs, matchers.Equals(set()))
-
- def test_all_targets_documented(self):
- # All the targets in the sample policy file must be documented in
- # doc/source/policy_mapping.rst.
-
- policy_keys = self._load_entries(unit.dirs.etc('policy.json'))
-
- # These keys are in the policy.json but aren't targets.
- policy_rule_keys = [
- 'admin_or_owner', 'admin_or_token_subject', 'admin_required',
- 'default', 'owner', 'service_admin_or_token_subject',
- 'service_or_admin', 'service_role', 'token_subject', ]
-
- def read_doc_targets():
- # Parse the doc/source/policy_mapping.rst file and return the
- # targets.
-
- doc_path = os.path.join(
- unit.ROOTDIR, 'doc', 'source', 'policy_mapping.rst')
- with open(doc_path) as doc_file:
- for line in doc_file:
- if line.startswith('Target'):
- break
- for line in doc_file:
- # Skip === line
- if line.startswith('==='):
- break
- for line in doc_file:
- line = line.rstrip()
- if not line or line.startswith(' '):
- continue
- if line.startswith('=='):
- break
- target, dummy, dummy = line.partition(' ')
- yield six.text_type(target)
-
- doc_targets = list(read_doc_targets())
- self.assertItemsEqual(policy_keys, doc_targets + policy_rule_keys)
diff --git a/keystone-moon/keystone/tests/unit/test_revoke.py b/keystone-moon/keystone/tests/unit/test_revoke.py
deleted file mode 100644
index 82c0125a..00000000
--- a/keystone-moon/keystone/tests/unit/test_revoke.py
+++ /dev/null
@@ -1,622 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import datetime
-import uuid
-
-import mock
-from oslo_utils import timeutils
-from six.moves import range
-from testtools import matchers
-
-from keystone.common import utils
-from keystone import exception
-from keystone.models import revoke_model
-from keystone.tests import unit
-from keystone.tests.unit import test_backend_sql
-from keystone.token import provider
-
-
-def _new_id():
- return uuid.uuid4().hex
-
-
-def _future_time():
- expire_delta = datetime.timedelta(seconds=1000)
- future_time = timeutils.utcnow() + expire_delta
- return future_time
-
-
-def _past_time():
- expire_delta = datetime.timedelta(days=-1000)
- past_time = timeutils.utcnow() + expire_delta
- return past_time
-
-
-def _sample_blank_token():
- issued_delta = datetime.timedelta(minutes=-2)
- issued_at = timeutils.utcnow() + issued_delta
- token_data = revoke_model.blank_token_data(issued_at)
- return token_data
-
-
-def _matches(event, token_values):
- """See if the token matches the revocation event.
-
- Used as a secondary check on the logic to Check
- By Tree Below: This is abrute force approach to checking.
- Compare each attribute from the event with the corresponding
- value from the token. If the event does not have a value for
- the attribute, a match is still possible. If the event has a
- value for the attribute, and it does not match the token, no match
- is possible, so skip the remaining checks.
-
- :param event: one revocation event to match
- :param token_values: dictionary with set of values taken from the
- token
- :returns: True if the token matches the revocation event, indicating the
- token has been revoked
- """
- # The token has three attributes that can match the user_id
- if event.user_id is not None:
- for attribute_name in ['user_id', 'trustor_id', 'trustee_id']:
- if event.user_id == token_values[attribute_name]:
- break
- else:
- return False
-
- # The token has two attributes that can match the domain_id
- if event.domain_id is not None:
- for attribute_name in ['identity_domain_id', 'assignment_domain_id']:
- if event.domain_id == token_values[attribute_name]:
- break
- else:
- return False
-
- if event.domain_scope_id is not None:
- if event.domain_scope_id != token_values['assignment_domain_id']:
- return False
-
- # If any one check does not match, the while token does
- # not match the event. The numerous return False indicate
- # that the token is still valid and short-circuits the
- # rest of the logic.
- attribute_names = ['project_id',
- 'expires_at', 'trust_id', 'consumer_id',
- 'access_token_id', 'audit_id', 'audit_chain_id']
- for attribute_name in attribute_names:
- if getattr(event, attribute_name) is not None:
- if (getattr(event, attribute_name) !=
- token_values[attribute_name]):
- return False
-
- if event.role_id is not None:
- roles = token_values['roles']
- for role in roles:
- if event.role_id == role:
- break
- else:
- return False
- if token_values['issued_at'] > event.issued_before:
- return False
- return True
-
-
-class RevokeTests(object):
-
- def test_list(self):
- self.revoke_api.revoke_by_user(user_id=1)
- self.assertEqual(1, len(self.revoke_api.list_events()))
-
- self.revoke_api.revoke_by_user(user_id=2)
- self.assertEqual(2, len(self.revoke_api.list_events()))
-
- def test_list_since(self):
- self.revoke_api.revoke_by_user(user_id=1)
- self.revoke_api.revoke_by_user(user_id=2)
- past = timeutils.utcnow() - datetime.timedelta(seconds=1000)
- self.assertEqual(2, len(self.revoke_api.list_events(last_fetch=past)))
- future = timeutils.utcnow() + datetime.timedelta(seconds=1000)
- self.assertEqual(0,
- len(self.revoke_api.list_events(last_fetch=future)))
-
- def test_past_expiry_are_removed(self):
- user_id = 1
- self.revoke_api.revoke_by_expiration(user_id, _future_time())
- self.assertEqual(1, len(self.revoke_api.list_events()))
- event = revoke_model.RevokeEvent()
- event.revoked_at = _past_time()
- self.revoke_api.revoke(event)
- self.assertEqual(1, len(self.revoke_api.list_events()))
-
- @mock.patch.object(timeutils, 'utcnow')
- def test_expired_events_removed_validate_token_success(self, mock_utcnow):
- def _sample_token_values():
- token = _sample_blank_token()
- token['expires_at'] = utils.isotime(_future_time(),
- subsecond=True)
- return token
-
- now = datetime.datetime.utcnow()
- now_plus_2h = now + datetime.timedelta(hours=2)
- mock_utcnow.return_value = now
-
- # Build a token and validate it. This will seed the cache for the
- # future 'synchronize' call.
- token_values = _sample_token_values()
-
- user_id = _new_id()
- self.revoke_api.revoke_by_user(user_id)
- token_values['user_id'] = user_id
- self.assertRaises(exception.TokenNotFound,
- self.revoke_api.check_token,
- token_values)
-
- # Move our clock forward by 2h, build a new token and validate it.
- # 'synchronize' should now be exercised and remove old expired events
- mock_utcnow.return_value = now_plus_2h
- self.revoke_api.revoke_by_expiration(_new_id(), now_plus_2h)
- # should no longer throw an exception
- self.revoke_api.check_token(token_values)
-
- def test_revoke_by_expiration_project_and_domain_fails(self):
- user_id = _new_id()
- expires_at = utils.isotime(_future_time(), subsecond=True)
- domain_id = _new_id()
- project_id = _new_id()
- self.assertThat(
- lambda: self.revoke_api.revoke_by_expiration(
- user_id, expires_at, domain_id=domain_id,
- project_id=project_id),
- matchers.raises(exception.UnexpectedError))
-
-
-class SqlRevokeTests(test_backend_sql.SqlTests, RevokeTests):
- def config_overrides(self):
- super(SqlRevokeTests, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='pki',
- revoke_by_id=False)
-
-
-class RevokeTreeTests(unit.TestCase):
- def setUp(self):
- super(RevokeTreeTests, self).setUp()
- self.events = []
- self.tree = revoke_model.RevokeTree()
- self._sample_data()
-
- def _sample_data(self):
- user_ids = []
- project_ids = []
- role_ids = []
- for i in range(0, 3):
- user_ids.append(_new_id())
- project_ids.append(_new_id())
- role_ids.append(_new_id())
-
- project_tokens = []
- i = len(project_tokens)
- project_tokens.append(_sample_blank_token())
- project_tokens[i]['user_id'] = user_ids[0]
- project_tokens[i]['project_id'] = project_ids[0]
- project_tokens[i]['roles'] = [role_ids[1]]
-
- i = len(project_tokens)
- project_tokens.append(_sample_blank_token())
- project_tokens[i]['user_id'] = user_ids[1]
- project_tokens[i]['project_id'] = project_ids[0]
- project_tokens[i]['roles'] = [role_ids[0]]
-
- i = len(project_tokens)
- project_tokens.append(_sample_blank_token())
- project_tokens[i]['user_id'] = user_ids[0]
- project_tokens[i]['project_id'] = project_ids[1]
- project_tokens[i]['roles'] = [role_ids[0]]
-
- token_to_revoke = _sample_blank_token()
- token_to_revoke['user_id'] = user_ids[0]
- token_to_revoke['project_id'] = project_ids[0]
- token_to_revoke['roles'] = [role_ids[0]]
-
- self.project_tokens = project_tokens
- self.user_ids = user_ids
- self.project_ids = project_ids
- self.role_ids = role_ids
- self.token_to_revoke = token_to_revoke
-
- def _assertTokenRevoked(self, token_data):
- self.assertTrue(any([_matches(e, token_data) for e in self.events]))
- return self.assertTrue(self.tree.is_revoked(token_data),
- 'Token should be revoked')
-
- def _assertTokenNotRevoked(self, token_data):
- self.assertFalse(any([_matches(e, token_data) for e in self.events]))
- return self.assertFalse(self.tree.is_revoked(token_data),
- 'Token should not be revoked')
-
- def _revoke_by_user(self, user_id):
- return self.tree.add_event(
- revoke_model.RevokeEvent(user_id=user_id))
-
- def _revoke_by_audit_id(self, audit_id):
- event = self.tree.add_event(
- revoke_model.RevokeEvent(audit_id=audit_id))
- self.events.append(event)
- return event
-
- def _revoke_by_audit_chain_id(self, audit_chain_id, project_id=None,
- domain_id=None):
- event = self.tree.add_event(
- revoke_model.RevokeEvent(audit_chain_id=audit_chain_id,
- project_id=project_id,
- domain_id=domain_id)
- )
- self.events.append(event)
- return event
-
- def _revoke_by_expiration(self, user_id, expires_at, project_id=None,
- domain_id=None):
- event = self.tree.add_event(
- revoke_model.RevokeEvent(user_id=user_id,
- expires_at=expires_at,
- project_id=project_id,
- domain_id=domain_id))
- self.events.append(event)
- return event
-
- def _revoke_by_grant(self, role_id, user_id=None,
- domain_id=None, project_id=None):
- event = self.tree.add_event(
- revoke_model.RevokeEvent(user_id=user_id,
- role_id=role_id,
- domain_id=domain_id,
- project_id=project_id))
- self.events.append(event)
- return event
-
- def _revoke_by_user_and_project(self, user_id, project_id):
- event = self.tree.add_event(
- revoke_model.RevokeEvent(project_id=project_id,
- user_id=user_id))
- self.events.append(event)
- return event
-
- def _revoke_by_project_role_assignment(self, project_id, role_id):
- event = self.tree.add_event(
- revoke_model.RevokeEvent(project_id=project_id,
- role_id=role_id))
- self.events.append(event)
- return event
-
- def _revoke_by_domain_role_assignment(self, domain_id, role_id):
- event = self.tree.add_event(
- revoke_model.RevokeEvent(domain_id=domain_id,
- role_id=role_id))
- self.events.append(event)
- return event
-
- def _revoke_by_domain(self, domain_id):
- event = self.tree.add_event(
- revoke_model.RevokeEvent(domain_id=domain_id))
- self.events.append(event)
-
- def _user_field_test(self, field_name):
- user_id = _new_id()
- event = self._revoke_by_user(user_id)
- self.events.append(event)
- token_data_u1 = _sample_blank_token()
- token_data_u1[field_name] = user_id
- self._assertTokenRevoked(token_data_u1)
- token_data_u2 = _sample_blank_token()
- token_data_u2[field_name] = _new_id()
- self._assertTokenNotRevoked(token_data_u2)
- self.tree.remove_event(event)
- self.events.remove(event)
- self._assertTokenNotRevoked(token_data_u1)
-
- def test_revoke_by_user(self):
- self._user_field_test('user_id')
-
- def test_revoke_by_user_matches_trustee(self):
- self._user_field_test('trustee_id')
-
- def test_revoke_by_user_matches_trustor(self):
- self._user_field_test('trustor_id')
-
- def test_by_user_expiration(self):
- future_time = _future_time()
-
- user_id = 1
- event = self._revoke_by_expiration(user_id, future_time)
- token_data_1 = _sample_blank_token()
- token_data_1['user_id'] = user_id
- token_data_1['expires_at'] = future_time.replace(microsecond=0)
- self._assertTokenRevoked(token_data_1)
-
- token_data_2 = _sample_blank_token()
- token_data_2['user_id'] = user_id
- expire_delta = datetime.timedelta(seconds=2000)
- future_time = timeutils.utcnow() + expire_delta
- token_data_2['expires_at'] = future_time
- self._assertTokenNotRevoked(token_data_2)
-
- self.remove_event(event)
- self._assertTokenNotRevoked(token_data_1)
-
- def test_revoke_by_audit_id(self):
- audit_id = provider.audit_info(parent_audit_id=None)[0]
- token_data_1 = _sample_blank_token()
- # Audit ID and Audit Chain ID are populated with the same value
- # if the token is an original token
- token_data_1['audit_id'] = audit_id
- token_data_1['audit_chain_id'] = audit_id
- event = self._revoke_by_audit_id(audit_id)
- self._assertTokenRevoked(token_data_1)
-
- audit_id_2 = provider.audit_info(parent_audit_id=audit_id)[0]
- token_data_2 = _sample_blank_token()
- token_data_2['audit_id'] = audit_id_2
- token_data_2['audit_chain_id'] = audit_id
- self._assertTokenNotRevoked(token_data_2)
-
- self.remove_event(event)
- self._assertTokenNotRevoked(token_data_1)
-
- def test_revoke_by_audit_chain_id(self):
- audit_id = provider.audit_info(parent_audit_id=None)[0]
- token_data_1 = _sample_blank_token()
- # Audit ID and Audit Chain ID are populated with the same value
- # if the token is an original token
- token_data_1['audit_id'] = audit_id
- token_data_1['audit_chain_id'] = audit_id
- event = self._revoke_by_audit_chain_id(audit_id)
- self._assertTokenRevoked(token_data_1)
-
- audit_id_2 = provider.audit_info(parent_audit_id=audit_id)[0]
- token_data_2 = _sample_blank_token()
- token_data_2['audit_id'] = audit_id_2
- token_data_2['audit_chain_id'] = audit_id
- self._assertTokenRevoked(token_data_2)
-
- self.remove_event(event)
- self._assertTokenNotRevoked(token_data_1)
- self._assertTokenNotRevoked(token_data_2)
-
- def test_by_user_project(self):
- # When a user has a project-scoped token and the project-scoped token
- # is revoked then the token is revoked.
-
- user_id = _new_id()
- project_id = _new_id()
-
- future_time = _future_time()
-
- token_data = _sample_blank_token()
- token_data['user_id'] = user_id
- token_data['project_id'] = project_id
- token_data['expires_at'] = future_time.replace(microsecond=0)
-
- self._revoke_by_expiration(user_id, future_time, project_id=project_id)
- self._assertTokenRevoked(token_data)
-
- def test_by_user_domain(self):
- # When a user has a domain-scoped token and the domain-scoped token
- # is revoked then the token is revoked.
-
- user_id = _new_id()
- domain_id = _new_id()
-
- future_time = _future_time()
-
- token_data = _sample_blank_token()
- token_data['user_id'] = user_id
- token_data['assignment_domain_id'] = domain_id
- token_data['expires_at'] = future_time.replace(microsecond=0)
-
- self._revoke_by_expiration(user_id, future_time, domain_id=domain_id)
- self._assertTokenRevoked(token_data)
-
- def remove_event(self, event):
- self.events.remove(event)
- self.tree.remove_event(event)
-
- def test_by_project_grant(self):
- token_to_revoke = self.token_to_revoke
- tokens = self.project_tokens
-
- self._assertTokenNotRevoked(token_to_revoke)
- for token in tokens:
- self._assertTokenNotRevoked(token)
-
- event = self._revoke_by_grant(role_id=self.role_ids[0],
- user_id=self.user_ids[0],
- project_id=self.project_ids[0])
-
- self._assertTokenRevoked(token_to_revoke)
- for token in tokens:
- self._assertTokenNotRevoked(token)
-
- self.remove_event(event)
-
- self._assertTokenNotRevoked(token_to_revoke)
- for token in tokens:
- self._assertTokenNotRevoked(token)
-
- token_to_revoke['roles'] = [self.role_ids[0],
- self.role_ids[1],
- self.role_ids[2]]
-
- event = self._revoke_by_grant(role_id=self.role_ids[0],
- user_id=self.user_ids[0],
- project_id=self.project_ids[0])
- self._assertTokenRevoked(token_to_revoke)
- self.remove_event(event)
- self._assertTokenNotRevoked(token_to_revoke)
-
- event = self._revoke_by_grant(role_id=self.role_ids[1],
- user_id=self.user_ids[0],
- project_id=self.project_ids[0])
- self._assertTokenRevoked(token_to_revoke)
- self.remove_event(event)
- self._assertTokenNotRevoked(token_to_revoke)
-
- self._revoke_by_grant(role_id=self.role_ids[0],
- user_id=self.user_ids[0],
- project_id=self.project_ids[0])
- self._revoke_by_grant(role_id=self.role_ids[1],
- user_id=self.user_ids[0],
- project_id=self.project_ids[0])
- self._revoke_by_grant(role_id=self.role_ids[2],
- user_id=self.user_ids[0],
- project_id=self.project_ids[0])
- self._assertTokenRevoked(token_to_revoke)
-
- def test_by_project_and_user_and_role(self):
- user_id1 = _new_id()
- user_id2 = _new_id()
- project_id = _new_id()
- self.events.append(self._revoke_by_user(user_id1))
- self.events.append(
- self._revoke_by_user_and_project(user_id2, project_id))
- token_data = _sample_blank_token()
- token_data['user_id'] = user_id2
- token_data['project_id'] = project_id
- self._assertTokenRevoked(token_data)
-
- def test_by_domain_user(self):
- # If revoke a domain, then a token for a user in the domain is revoked
-
- user_id = _new_id()
- domain_id = _new_id()
-
- token_data = _sample_blank_token()
- token_data['user_id'] = user_id
- token_data['identity_domain_id'] = domain_id
-
- self._revoke_by_domain(domain_id)
-
- self._assertTokenRevoked(token_data)
-
- def test_by_domain_project(self):
- # If revoke a domain, then a token scoped to a project in the domain
- # is revoked.
-
- user_id = _new_id()
- user_domain_id = _new_id()
-
- project_id = _new_id()
- project_domain_id = _new_id()
-
- token_data = _sample_blank_token()
- token_data['user_id'] = user_id
- token_data['identity_domain_id'] = user_domain_id
- token_data['project_id'] = project_id
- token_data['assignment_domain_id'] = project_domain_id
-
- self._revoke_by_domain(project_domain_id)
-
- self._assertTokenRevoked(token_data)
-
- def test_by_domain_domain(self):
- # If revoke a domain, then a token scoped to the domain is revoked.
-
- user_id = _new_id()
- user_domain_id = _new_id()
-
- domain_id = _new_id()
-
- token_data = _sample_blank_token()
- token_data['user_id'] = user_id
- token_data['identity_domain_id'] = user_domain_id
- token_data['assignment_domain_id'] = domain_id
-
- self._revoke_by_domain(domain_id)
-
- self._assertTokenRevoked(token_data)
-
- def _assertEmpty(self, collection):
- return self.assertEqual(0, len(collection), "collection not empty")
-
- def _assertEventsMatchIteration(self, turn):
- self.assertEqual(1, len(self.tree.revoke_map))
- self.assertEqual(turn + 1, len(self.tree.revoke_map
- ['trust_id=*']
- ['consumer_id=*']
- ['access_token_id=*']
- ['audit_id=*']
- ['audit_chain_id=*']))
- # two different functions add domain_ids, +1 for None
- self.assertEqual(2 * turn + 1, len(self.tree.revoke_map
- ['trust_id=*']
- ['consumer_id=*']
- ['access_token_id=*']
- ['audit_id=*']
- ['audit_chain_id=*']
- ['expires_at=*']))
- # two different functions add project_ids, +1 for None
- self.assertEqual(2 * turn + 1, len(self.tree.revoke_map
- ['trust_id=*']
- ['consumer_id=*']
- ['access_token_id=*']
- ['audit_id=*']
- ['audit_chain_id=*']
- ['expires_at=*']
- ['domain_id=*']))
- # 10 users added
- self.assertEqual(turn, len(self.tree.revoke_map
- ['trust_id=*']
- ['consumer_id=*']
- ['access_token_id=*']
- ['audit_id=*']
- ['audit_chain_id=*']
- ['expires_at=*']
- ['domain_id=*']
- ['project_id=*']))
-
- def test_cleanup(self):
- events = self.events
- self._assertEmpty(self.tree.revoke_map)
- expiry_base_time = _future_time()
- for i in range(0, 10):
- events.append(
- self._revoke_by_user(_new_id()))
-
- args = (_new_id(),
- expiry_base_time + datetime.timedelta(seconds=i))
- events.append(
- self._revoke_by_expiration(*args))
-
- self.assertEqual(i + 2, len(self.tree.revoke_map
- ['trust_id=*']
- ['consumer_id=*']
- ['access_token_id=*']
- ['audit_id=*']
- ['audit_chain_id=*']),
- 'adding %s to %s' % (args,
- self.tree.revoke_map))
-
- events.append(
- self._revoke_by_project_role_assignment(_new_id(), _new_id()))
- events.append(
- self._revoke_by_domain_role_assignment(_new_id(), _new_id()))
- events.append(
- self._revoke_by_domain_role_assignment(_new_id(), _new_id()))
- events.append(
- self._revoke_by_user_and_project(_new_id(), _new_id()))
- self._assertEventsMatchIteration(i + 1)
-
- for event in self.events:
- self.tree.remove_event(event)
- self._assertEmpty(self.tree.revoke_map)
diff --git a/keystone-moon/keystone/tests/unit/test_singular_plural.py b/keystone-moon/keystone/tests/unit/test_singular_plural.py
deleted file mode 100644
index b07ea8d5..00000000
--- a/keystone-moon/keystone/tests/unit/test_singular_plural.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2012 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import ast
-
-from keystone.contrib.admin_crud import core as admin_crud_core
-from keystone.contrib.s3 import core as s3_core
-from keystone.contrib.user_crud import core as user_crud_core
-from keystone.identity import core as identity_core
-from keystone import service
-
-
-class TestSingularPlural(object):
- def test_keyword_arg_condition_or_methods(self):
- """Raise if we see a keyword arg called 'condition' or 'methods'."""
- modules = [admin_crud_core, s3_core,
- user_crud_core, identity_core, service]
- for module in modules:
- filename = module.__file__
- if filename.endswith(".pyc"):
- # In Python 2, the .py and .pyc files are in the same dir.
- filename = filename[:-1]
- with open(filename) as fil:
- source = fil.read()
- module = ast.parse(source, filename)
- last_stmt_or_expr = None
- for node in ast.walk(module):
- if isinstance(node, ast.stmt) or isinstance(node, ast.expr):
- # keyword nodes don't have line numbers, so we need to
- # get that information from the parent stmt or expr.
- last_stmt_or_expr = node
- elif isinstance(node, ast.keyword):
- for bad_word in ["condition", "methods"]:
- if node.arg == bad_word:
- raise AssertionError(
- "Suspicious name '%s' at %s line %s" %
- (bad_word, filename, last_stmt_or_expr.lineno))
diff --git a/keystone-moon/keystone/tests/unit/test_sql_livetest.py b/keystone-moon/keystone/tests/unit/test_sql_livetest.py
deleted file mode 100644
index 18b8ea91..00000000
--- a/keystone-moon/keystone/tests/unit/test_sql_livetest.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2013 Red Hat, Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.tests import unit
-from keystone.tests.unit import test_sql_upgrade
-
-
-class PostgresqlMigrateTests(test_sql_upgrade.SqlUpgradeTests):
- def setUp(self):
- self.skip_if_env_not_set('ENABLE_LIVE_POSTGRES_TEST')
- super(PostgresqlMigrateTests, self).setUp()
-
- def config_files(self):
- files = super(PostgresqlMigrateTests, self).config_files()
- files.append(unit.dirs.tests_conf("backend_postgresql.conf"))
- return files
-
-
-class MysqlMigrateTests(test_sql_upgrade.SqlUpgradeTests):
- def setUp(self):
- self.skip_if_env_not_set('ENABLE_LIVE_MYSQL_TEST')
- super(MysqlMigrateTests, self).setUp()
-
- def config_files(self):
- files = super(MysqlMigrateTests, self).config_files()
- files.append(unit.dirs.tests_conf("backend_mysql.conf"))
- return files
-
-
-class Db2MigrateTests(test_sql_upgrade.SqlUpgradeTests):
- def setUp(self):
- self.skip_if_env_not_set('ENABLE_LIVE_DB2_TEST')
- super(Db2MigrateTests, self).setUp()
-
- def config_files(self):
- files = super(Db2MigrateTests, self).config_files()
- files.append(unit.dirs.tests_conf("backend_db2.conf"))
- return files
diff --git a/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py b/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
deleted file mode 100644
index 0155f787..00000000
--- a/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-"""
-To run these tests against a live database:
-
-1. Modify the file `keystone/tests/unit/config_files/backend_sql.conf` to use
- the connection for your live database.
-2. Set up a blank, live database.
-3. Run the tests using::
-
- tox -e py27 -- keystone.tests.unit.test_sql_migrate_extensions
-
-WARNING::
-
- Your database will be wiped.
-
- Do not do this against a Database with valuable data as
- all data will be lost.
-"""
-
-from keystone.contrib import endpoint_filter
-from keystone.contrib import endpoint_policy
-from keystone.contrib import federation
-from keystone.contrib import oauth1
-from keystone.contrib import revoke
-from keystone import exception
-from keystone.tests.unit import test_sql_upgrade
-
-
-class SqlUpgradeOAuth1Extension(test_sql_upgrade.SqlMigrateBase):
-
- OAUTH1_MIGRATIONS = 5
-
- def repo_package(self):
- return oauth1
-
- def test_upgrade(self):
- for version in range(self.OAUTH1_MIGRATIONS):
- v = version + 1
- self.assertRaises(exception.MigrationMovedFailure,
- self.upgrade, version=v,
- repository=self.repo_path)
-
-
-class EndpointFilterExtension(test_sql_upgrade.SqlMigrateBase):
-
- ENDPOINT_FILTER_MIGRATIONS = 2
-
- def repo_package(self):
- return endpoint_filter
-
- def test_upgrade(self):
- for version in range(self.ENDPOINT_FILTER_MIGRATIONS):
- v = version + 1
- self.assertRaises(exception.MigrationMovedFailure,
- self.upgrade, version=v,
- repository=self.repo_path)
-
-
-class EndpointPolicyExtension(test_sql_upgrade.SqlMigrateBase):
-
- ENDPOINT_POLICY_MIGRATIONS = 1
-
- def repo_package(self):
- return endpoint_policy
-
- def test_upgrade(self):
- self.assertRaises(exception.MigrationMovedFailure,
- self.upgrade,
- version=self.ENDPOINT_POLICY_MIGRATIONS,
- repository=self.repo_path)
-
-
-class FederationExtension(test_sql_upgrade.SqlMigrateBase):
-
- FEDERATION_MIGRATIONS = 8
-
- def repo_package(self):
- return federation
-
- def test_upgrade(self):
- for version in range(self.FEDERATION_MIGRATIONS):
- v = version + 1
- self.assertRaises(exception.MigrationMovedFailure,
- self.upgrade, version=v,
- repository=self.repo_path)
-
-
-class RevokeExtension(test_sql_upgrade.SqlMigrateBase):
-
- REVOKE_MIGRATIONS = 2
-
- def repo_package(self):
- return revoke
-
- def test_upgrade(self):
- for version in range(self.REVOKE_MIGRATIONS):
- v = version + 1
- self.assertRaises(exception.MigrationMovedFailure,
- self.upgrade, version=v,
- repository=self.repo_path)
diff --git a/keystone-moon/keystone/tests/unit/test_sql_upgrade.py b/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
deleted file mode 100644
index 5ca12f66..00000000
--- a/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
+++ /dev/null
@@ -1,1195 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-"""
-To run these tests against a live database:
-
-1. Modify the file ``keystone/tests/unit/config_files/backend_sql.conf`` to use
- the connection for your live database.
-2. Set up a blank, live database
-3. Run the tests using::
-
- tox -e py27 -- keystone.tests.unit.test_sql_upgrade
-
-WARNING::
-
- Your database will be wiped.
-
- Do not do this against a database with valuable data as
- all data will be lost.
-"""
-
-import json
-import uuid
-
-import migrate
-from migrate.versioning import api as versioning_api
-from migrate.versioning import repository
-import mock
-from oslo_config import cfg
-from oslo_db import exception as db_exception
-from oslo_db.sqlalchemy import migration
-from oslo_db.sqlalchemy import session as db_session
-from sqlalchemy.engine import reflection
-import sqlalchemy.exc
-from sqlalchemy import schema
-from testtools import matchers
-
-from keystone.common import sql
-from keystone.common.sql import migration_helpers
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.ksfixtures import database
-
-
-CONF = cfg.CONF
-
-# NOTE(morganfainberg): This should be updated when each DB migration collapse
-# is done to mirror the expected structure of the DB in the format of
-# { <DB_TABLE_NAME>: [<COLUMN>, <COLUMN>, ...], ... }
-INITIAL_TABLE_STRUCTURE = {
- 'credential': [
- 'id', 'user_id', 'project_id', 'blob', 'type', 'extra',
- ],
- 'domain': [
- 'id', 'name', 'enabled', 'extra',
- ],
- 'endpoint': [
- 'id', 'legacy_endpoint_id', 'interface', 'region_id', 'service_id',
- 'url', 'enabled', 'extra',
- ],
- 'group': [
- 'id', 'domain_id', 'name', 'description', 'extra',
- ],
- 'policy': [
- 'id', 'type', 'blob', 'extra',
- ],
- 'project': [
- 'id', 'name', 'extra', 'description', 'enabled', 'domain_id',
- 'parent_id',
- ],
- 'role': [
- 'id', 'name', 'extra',
- ],
- 'service': [
- 'id', 'type', 'extra', 'enabled',
- ],
- 'token': [
- 'id', 'expires', 'extra', 'valid', 'trust_id', 'user_id',
- ],
- 'trust': [
- 'id', 'trustor_user_id', 'trustee_user_id', 'project_id',
- 'impersonation', 'deleted_at', 'expires_at', 'remaining_uses', 'extra',
- ],
- 'trust_role': [
- 'trust_id', 'role_id',
- ],
- 'user': [
- 'id', 'name', 'extra', 'password', 'enabled', 'domain_id',
- 'default_project_id',
- ],
- 'user_group_membership': [
- 'user_id', 'group_id',
- ],
- 'region': [
- 'id', 'description', 'parent_region_id', 'extra',
- ],
- 'assignment': [
- 'type', 'actor_id', 'target_id', 'role_id', 'inherited',
- ],
- 'id_mapping': [
- 'public_id', 'domain_id', 'local_id', 'entity_type',
- ],
- 'whitelisted_config': [
- 'domain_id', 'group', 'option', 'value',
- ],
- 'sensitive_config': [
- 'domain_id', 'group', 'option', 'value',
- ],
-}
-
-
-# Test migration_helpers.get_init_version separately to ensure it works before
-# using in the SqlUpgrade tests.
-class MigrationHelpersGetInitVersionTests(unit.TestCase):
- @mock.patch.object(repository, 'Repository')
- def test_get_init_version_no_path(self, repo):
- migrate_versions = mock.MagicMock()
- # make a version list starting with zero. `get_init_version` will
- # return None for this value.
- migrate_versions.versions.versions = list(range(0, 5))
- repo.return_value = migrate_versions
-
- # os.path.isdir() is called by `find_migrate_repo()`. Mock it to avoid
- # an exception.
- with mock.patch('os.path.isdir', return_value=True):
- # since 0 is the smallest version expect None
- version = migration_helpers.get_init_version()
- self.assertIsNone(version)
-
- # check that the default path was used as the first argument to the
- # first invocation of repo. Cannot match the full path because it is
- # based on where the test is run.
- param = repo.call_args_list[0][0][0]
- self.assertTrue(param.endswith('/sql/migrate_repo'))
-
- @mock.patch.object(repository, 'Repository')
- def test_get_init_version_with_path_initial_version_0(self, repo):
- migrate_versions = mock.MagicMock()
- # make a version list starting with zero. `get_init_version` will
- # return None for this value.
- migrate_versions.versions.versions = list(range(0, 5))
- repo.return_value = migrate_versions
-
- # os.path.isdir() is called by `find_migrate_repo()`. Mock it to avoid
- # an exception.
- with mock.patch('os.path.isdir', return_value=True):
- path = '/keystone/migrate_repo/'
-
- # since 0 is the smallest version expect None
- version = migration_helpers.get_init_version(abs_path=path)
- self.assertIsNone(version)
-
- @mock.patch.object(repository, 'Repository')
- def test_get_init_version_with_path(self, repo):
- initial_version = 10
-
- migrate_versions = mock.MagicMock()
- migrate_versions.versions.versions = list(range(initial_version + 1,
- initial_version + 5))
- repo.return_value = migrate_versions
-
- # os.path.isdir() is called by `find_migrate_repo()`. Mock it to avoid
- # an exception.
- with mock.patch('os.path.isdir', return_value=True):
- path = '/keystone/migrate_repo/'
-
- version = migration_helpers.get_init_version(abs_path=path)
- self.assertEqual(initial_version, version)
-
-
-class SqlMigrateBase(unit.SQLDriverOverrides, unit.TestCase):
- # override this in subclasses. The default of zero covers tests such
- # as extensions upgrades.
- _initial_db_version = 0
-
- def initialize_sql(self):
- self.metadata = sqlalchemy.MetaData()
- self.metadata.bind = self.engine
-
- def config_files(self):
- config_files = super(SqlMigrateBase, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
- def repo_package(self):
- return sql
-
- def setUp(self):
- super(SqlMigrateBase, self).setUp()
- self.load_backends()
- database.initialize_sql_session()
- conn_str = CONF.database.connection
- if (conn_str != unit.IN_MEM_DB_CONN_STRING and
- conn_str.startswith('sqlite') and
- conn_str[10:] == unit.DEFAULT_TEST_DB_FILE):
- # Override the default with a DB that is specific to the migration
- # tests only if the DB Connection string is the same as the global
- # default. This is required so that no conflicts occur due to the
- # global default DB already being under migrate control. This is
- # only needed if the DB is not-in-memory
- db_file = unit.dirs.tmp('keystone_migrate_test.db')
- self.config_fixture.config(
- group='database',
- connection='sqlite:///%s' % db_file)
-
- # create and share a single sqlalchemy engine for testing
- with sql.session_for_write() as session:
- self.engine = session.get_bind()
- self.addCleanup(self.cleanup_instance('engine'))
- self.Session = db_session.get_maker(self.engine, autocommit=False)
- self.addCleanup(sqlalchemy.orm.session.Session.close_all)
-
- self.initialize_sql()
- self.repo_path = migration_helpers.find_migrate_repo(
- self.repo_package())
- self.schema = versioning_api.ControlledSchema.create(
- self.engine,
- self.repo_path,
- self._initial_db_version)
-
- # auto-detect the highest available schema version in the migrate_repo
- self.max_version = self.schema.repository.version().version
-
- self.addCleanup(sql.cleanup)
-
- # drop tables and FKs.
- self.addCleanup(self._cleanupDB)
-
- def _cleanupDB(self):
- meta = sqlalchemy.MetaData()
- meta.bind = self.engine
- meta.reflect(self.engine)
-
- with self.engine.begin() as conn:
- inspector = reflection.Inspector.from_engine(self.engine)
- metadata = schema.MetaData()
- tbs = []
- all_fks = []
-
- for table_name in inspector.get_table_names():
- fks = []
- for fk in inspector.get_foreign_keys(table_name):
- if not fk['name']:
- continue
- fks.append(
- schema.ForeignKeyConstraint((), (), name=fk['name']))
- table = schema.Table(table_name, metadata, *fks)
- tbs.append(table)
- all_fks.extend(fks)
-
- for fkc in all_fks:
- if self.engine.name != 'sqlite':
- conn.execute(schema.DropConstraint(fkc))
-
- for table in tbs:
- conn.execute(schema.DropTable(table))
-
- def select_table(self, name):
- table = sqlalchemy.Table(name,
- self.metadata,
- autoload=True)
- s = sqlalchemy.select([table])
- return s
-
- def assertTableExists(self, table_name):
- try:
- self.select_table(table_name)
- except sqlalchemy.exc.NoSuchTableError:
- raise AssertionError('Table "%s" does not exist' % table_name)
-
- def assertTableDoesNotExist(self, table_name):
- """Asserts that a given table exists cannot be selected by name."""
- # Switch to a different metadata otherwise you might still
- # detect renamed or dropped tables
- try:
- temp_metadata = sqlalchemy.MetaData()
- temp_metadata.bind = self.engine
- sqlalchemy.Table(table_name, temp_metadata, autoload=True)
- except sqlalchemy.exc.NoSuchTableError:
- pass
- else:
- raise AssertionError('Table "%s" already exists' % table_name)
-
- def assertTableCountsMatch(self, table1_name, table2_name):
- try:
- table1 = self.select_table(table1_name)
- except sqlalchemy.exc.NoSuchTableError:
- raise AssertionError('Table "%s" does not exist' % table1_name)
- try:
- table2 = self.select_table(table2_name)
- except sqlalchemy.exc.NoSuchTableError:
- raise AssertionError('Table "%s" does not exist' % table2_name)
- session = self.Session()
- table1_count = session.execute(table1.count()).scalar()
- table2_count = session.execute(table2.count()).scalar()
- if table1_count != table2_count:
- raise AssertionError('Table counts do not match: {0} ({1}), {2} '
- '({3})'.format(table1_name, table1_count,
- table2_name, table2_count))
-
- def upgrade(self, *args, **kwargs):
- self._migrate(*args, **kwargs)
-
- def _migrate(self, version, repository=None, downgrade=False,
- current_schema=None):
- repository = repository or self.repo_path
- err = ''
- version = versioning_api._migrate_version(self.schema,
- version,
- not downgrade,
- err)
- if not current_schema:
- current_schema = self.schema
- changeset = current_schema.changeset(version)
- for ver, change in changeset:
- self.schema.runchange(ver, change, changeset.step)
- self.assertEqual(self.schema.version, version)
-
- def assertTableColumns(self, table_name, expected_cols):
- """Asserts that the table contains the expected set of columns."""
- self.initialize_sql()
- table = self.select_table(table_name)
- actual_cols = [col.name for col in table.columns]
- # Check if the columns are equal, but allow for a different order,
- # which might occur after an upgrade followed by a downgrade
- self.assertItemsEqual(expected_cols, actual_cols,
- '%s table' % table_name)
-
-
-class SqlUpgradeTests(SqlMigrateBase):
- _initial_db_version = migration_helpers.get_init_version()
-
- def test_blank_db_to_start(self):
- self.assertTableDoesNotExist('user')
-
- def test_start_version_db_init_version(self):
- with sql.session_for_write() as session:
- version = migration.db_version(session.get_bind(), self.repo_path,
- self._initial_db_version)
- self.assertEqual(
- self._initial_db_version,
- version,
- 'DB is not at version %s' % self._initial_db_version)
-
- def test_upgrade_add_initial_tables(self):
- self.upgrade(self._initial_db_version + 1)
- self.check_initial_table_structure()
-
- def check_initial_table_structure(self):
- for table in INITIAL_TABLE_STRUCTURE:
- self.assertTableColumns(table, INITIAL_TABLE_STRUCTURE[table])
-
- def insert_dict(self, session, table_name, d, table=None):
- """Naively inserts key-value pairs into a table, given a dictionary."""
- if table is None:
- this_table = sqlalchemy.Table(table_name, self.metadata,
- autoload=True)
- else:
- this_table = table
- insert = this_table.insert().values(**d)
- session.execute(insert)
- session.commit()
-
- def test_kilo_squash(self):
- self.upgrade(67)
-
- # In 053 the size of ID and parent region ID columns were changed
- table = sqlalchemy.Table('region', self.metadata, autoload=True)
- self.assertEqual(255, table.c.id.type.length)
- self.assertEqual(255, table.c.parent_region_id.type.length)
- table = sqlalchemy.Table('endpoint', self.metadata, autoload=True)
- self.assertEqual(255, table.c.region_id.type.length)
-
- # In 054 an index was created for the actor_id of the assignment table
- table = sqlalchemy.Table('assignment', self.metadata, autoload=True)
- index_data = [(idx.name, list(idx.columns.keys()))
- for idx in table.indexes]
- self.assertIn(('ix_actor_id', ['actor_id']), index_data)
-
- # In 055 indexes were created for user and trust IDs in the token table
- table = sqlalchemy.Table('token', self.metadata, autoload=True)
- index_data = [(idx.name, list(idx.columns.keys()))
- for idx in table.indexes]
- self.assertIn(('ix_token_user_id', ['user_id']), index_data)
- self.assertIn(('ix_token_trust_id', ['trust_id']), index_data)
-
- # In 062 the role ID foreign key was removed from the assignment table
- if self.engine.name == "mysql":
- self.assertFalse(self.does_fk_exist('assignment', 'role_id'))
-
- # In 064 the domain ID FK was removed from the group and user tables
- if self.engine.name != 'sqlite':
- # sqlite does not support FK deletions (or enforcement)
- self.assertFalse(self.does_fk_exist('group', 'domain_id'))
- self.assertFalse(self.does_fk_exist('user', 'domain_id'))
-
- # In 067 the role ID index was removed from the assignment table
- if self.engine.name == "mysql":
- self.assertFalse(self._does_index_exist('assignment',
- 'assignment_role_id_fkey'))
-
- def test_insert_assignment_inherited_pk(self):
- ASSIGNMENT_TABLE_NAME = 'assignment'
- INHERITED_COLUMN_NAME = 'inherited'
- ROLE_TABLE_NAME = 'role'
-
- self.upgrade(72)
-
- # Check that the 'inherited' column is not part of the PK
- self.assertFalse(self.does_pk_exist(ASSIGNMENT_TABLE_NAME,
- INHERITED_COLUMN_NAME))
-
- session = self.Session()
-
- role = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.insert_dict(session, ROLE_TABLE_NAME, role)
-
- # Create both inherited and noninherited role assignments
- inherited = {'type': 'UserProject',
- 'actor_id': uuid.uuid4().hex,
- 'target_id': uuid.uuid4().hex,
- 'role_id': role['id'],
- 'inherited': True}
-
- noninherited = inherited.copy()
- noninherited['inherited'] = False
-
- # Create another inherited role assignment as a spoiler
- spoiler = inherited.copy()
- spoiler['actor_id'] = uuid.uuid4().hex
-
- self.insert_dict(session, ASSIGNMENT_TABLE_NAME, inherited)
- self.insert_dict(session, ASSIGNMENT_TABLE_NAME, spoiler)
-
- # Since 'inherited' is not part of the PK, we can't insert noninherited
- self.assertRaises(db_exception.DBDuplicateEntry,
- self.insert_dict,
- session,
- ASSIGNMENT_TABLE_NAME,
- noninherited)
-
- session.close()
-
- self.upgrade(73)
-
- session = self.Session()
- self.metadata.clear()
-
- # Check that the 'inherited' column is now part of the PK
- self.assertTrue(self.does_pk_exist(ASSIGNMENT_TABLE_NAME,
- INHERITED_COLUMN_NAME))
-
- # The noninherited role assignment can now be inserted
- self.insert_dict(session, ASSIGNMENT_TABLE_NAME, noninherited)
-
- assignment_table = sqlalchemy.Table(ASSIGNMENT_TABLE_NAME,
- self.metadata,
- autoload=True)
-
- assignments = session.query(assignment_table).all()
- for assignment in (inherited, spoiler, noninherited):
- self.assertIn((assignment['type'], assignment['actor_id'],
- assignment['target_id'], assignment['role_id'],
- assignment['inherited']),
- assignments)
-
- def does_pk_exist(self, table, pk_column):
- """Checks whether a column is primary key on a table."""
- inspector = reflection.Inspector.from_engine(self.engine)
- pk_columns = inspector.get_pk_constraint(table)['constrained_columns']
-
- return pk_column in pk_columns
-
- def does_fk_exist(self, table, fk_column):
- inspector = reflection.Inspector.from_engine(self.engine)
- for fk in inspector.get_foreign_keys(table):
- if fk_column in fk['constrained_columns']:
- return True
- return False
-
- def does_index_exist(self, table_name, index_name):
- meta = sqlalchemy.MetaData(bind=self.engine)
- table = sqlalchemy.Table(table_name, meta, autoload=True)
- return index_name in [idx.name for idx in table.indexes]
-
- def does_constraint_exist(self, table_name, constraint_name):
- meta = sqlalchemy.MetaData(bind=self.engine)
- table = sqlalchemy.Table(table_name, meta, autoload=True)
- return constraint_name in [con.name for con in table.constraints]
-
- def test_endpoint_policy_upgrade(self):
- self.assertTableDoesNotExist('policy_association')
- self.upgrade(81)
- self.assertTableColumns('policy_association',
- ['id', 'policy_id', 'endpoint_id',
- 'service_id', 'region_id'])
-
- @mock.patch.object(migration_helpers, 'get_db_version', return_value=1)
- def test_endpoint_policy_already_migrated(self, mock_ep):
-
- # By setting the return value to 1, the migration has already been
- # run, and there's no need to create the table again
-
- self.upgrade(81)
-
- mock_ep.assert_called_once_with(extension='endpoint_policy',
- engine=mock.ANY)
-
- # It won't exist because we are mocking it, but we can verify
- # that 081 did not create the table
- self.assertTableDoesNotExist('policy_association')
-
- def test_create_federation_tables(self):
- self.identity_provider = 'identity_provider'
- self.federation_protocol = 'federation_protocol'
- self.service_provider = 'service_provider'
- self.mapping = 'mapping'
- self.remote_ids = 'idp_remote_ids'
-
- self.assertTableDoesNotExist(self.identity_provider)
- self.assertTableDoesNotExist(self.federation_protocol)
- self.assertTableDoesNotExist(self.service_provider)
- self.assertTableDoesNotExist(self.mapping)
- self.assertTableDoesNotExist(self.remote_ids)
-
- self.upgrade(82)
- self.assertTableColumns(self.identity_provider,
- ['id', 'description', 'enabled'])
-
- self.assertTableColumns(self.federation_protocol,
- ['id', 'idp_id', 'mapping_id'])
-
- self.assertTableColumns(self.mapping,
- ['id', 'rules'])
-
- self.assertTableColumns(self.service_provider,
- ['id', 'description', 'enabled', 'auth_url',
- 'relay_state_prefix', 'sp_url'])
-
- self.assertTableColumns(self.remote_ids, ['idp_id', 'remote_id'])
-
- federation_protocol = sqlalchemy.Table(self.federation_protocol,
- self.metadata,
- autoload=True)
- self.assertFalse(federation_protocol.c.mapping_id.nullable)
-
- sp_table = sqlalchemy.Table(self.service_provider,
- self.metadata,
- autoload=True)
- self.assertFalse(sp_table.c.auth_url.nullable)
- self.assertFalse(sp_table.c.sp_url.nullable)
-
- @mock.patch.object(migration_helpers, 'get_db_version', return_value=8)
- def test_federation_already_migrated(self, mock_federation):
-
- # By setting the return value to 8, the migration has already been
- # run, and there's no need to create the table again.
- self.upgrade(82)
-
- mock_federation.assert_any_call(extension='federation',
- engine=mock.ANY)
-
- # It won't exist because we are mocking it, but we can verify
- # that 082 did not create the table.
- self.assertTableDoesNotExist('identity_provider')
- self.assertTableDoesNotExist('federation_protocol')
- self.assertTableDoesNotExist('mapping')
- self.assertTableDoesNotExist('service_provider')
- self.assertTableDoesNotExist('idp_remote_ids')
-
- def test_create_oauth_tables(self):
- consumer = 'consumer'
- request_token = 'request_token'
- access_token = 'access_token'
- self.assertTableDoesNotExist(consumer)
- self.assertTableDoesNotExist(request_token)
- self.assertTableDoesNotExist(access_token)
- self.upgrade(83)
- self.assertTableColumns(consumer,
- ['id',
- 'description',
- 'secret',
- 'extra'])
- self.assertTableColumns(request_token,
- ['id',
- 'request_secret',
- 'verifier',
- 'authorizing_user_id',
- 'requested_project_id',
- 'role_ids',
- 'consumer_id',
- 'expires_at'])
- self.assertTableColumns(access_token,
- ['id',
- 'access_secret',
- 'authorizing_user_id',
- 'project_id',
- 'role_ids',
- 'consumer_id',
- 'expires_at'])
-
- @mock.patch.object(migration_helpers, 'get_db_version', return_value=5)
- def test_oauth1_already_migrated(self, mock_oauth1):
-
- # By setting the return value to 5, the migration has already been
- # run, and there's no need to create the table again.
- self.upgrade(83)
-
- mock_oauth1.assert_any_call(extension='oauth1', engine=mock.ANY)
-
- # It won't exist because we are mocking it, but we can verify
- # that 083 did not create the table.
- self.assertTableDoesNotExist('consumer')
- self.assertTableDoesNotExist('request_token')
- self.assertTableDoesNotExist('access_token')
-
- def test_create_revoke_table(self):
- self.assertTableDoesNotExist('revocation_event')
- self.upgrade(84)
- self.assertTableColumns('revocation_event',
- ['id', 'domain_id', 'project_id', 'user_id',
- 'role_id', 'trust_id', 'consumer_id',
- 'access_token_id', 'issued_before',
- 'expires_at', 'revoked_at',
- 'audit_chain_id', 'audit_id'])
-
- @mock.patch.object(migration_helpers, 'get_db_version', return_value=2)
- def test_revoke_already_migrated(self, mock_revoke):
-
- # By setting the return value to 2, the migration has already been
- # run, and there's no need to create the table again.
- self.upgrade(84)
-
- mock_revoke.assert_any_call(extension='revoke', engine=mock.ANY)
-
- # It won't exist because we are mocking it, but we can verify
- # that 084 did not create the table.
- self.assertTableDoesNotExist('revocation_event')
-
- def test_project_is_domain_upgrade(self):
- self.upgrade(74)
- self.assertTableColumns('project',
- ['id', 'name', 'extra', 'description',
- 'enabled', 'domain_id', 'parent_id',
- 'is_domain'])
-
- def test_implied_roles_upgrade(self):
- self.upgrade(87)
- self.assertTableColumns('implied_role',
- ['prior_role_id', 'implied_role_id'])
- self.assertTrue(self.does_fk_exist('implied_role', 'prior_role_id'))
- self.assertTrue(self.does_fk_exist('implied_role', 'implied_role_id'))
-
- def test_add_config_registration(self):
- config_registration = 'config_register'
- self.upgrade(74)
- self.assertTableDoesNotExist(config_registration)
- self.upgrade(75)
- self.assertTableColumns(config_registration, ['type', 'domain_id'])
-
- def test_endpoint_filter_upgrade(self):
- def assert_tables_columns_exist():
- self.assertTableColumns('project_endpoint',
- ['endpoint_id', 'project_id'])
- self.assertTableColumns('endpoint_group',
- ['id', 'name', 'description', 'filters'])
- self.assertTableColumns('project_endpoint_group',
- ['endpoint_group_id', 'project_id'])
-
- self.assertTableDoesNotExist('project_endpoint')
- self.upgrade(85)
- assert_tables_columns_exist()
-
- @mock.patch.object(migration_helpers, 'get_db_version', return_value=2)
- def test_endpoint_filter_already_migrated(self, mock_endpoint_filter):
-
- # By setting the return value to 2, the migration has already been
- # run, and there's no need to create the table again.
- self.upgrade(85)
-
- mock_endpoint_filter.assert_any_call(extension='endpoint_filter',
- engine=mock.ANY)
-
- # It won't exist because we are mocking it, but we can verify
- # that 085 did not create the table.
- self.assertTableDoesNotExist('project_endpoint')
- self.assertTableDoesNotExist('endpoint_group')
- self.assertTableDoesNotExist('project_endpoint_group')
-
- def test_add_trust_unique_constraint_upgrade(self):
- self.upgrade(86)
- inspector = reflection.Inspector.from_engine(self.engine)
- constraints = inspector.get_unique_constraints('trust')
- constraint_names = [constraint['name'] for constraint in constraints]
- self.assertIn('duplicate_trust_constraint', constraint_names)
-
- def test_add_domain_specific_roles(self):
- """Check database upgraded successfully for domain specific roles.
-
- The following items need to be checked:
-
- - The domain_id column has been added
- - That it has been added to the uniqueness constraints
- - Existing roles have their domain_id columns set to the specific
- string of '<<null>>'
-
- """
- NULL_DOMAIN_ID = '<<null>>'
-
- self.upgrade(87)
- session = self.Session()
- role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
- # Add a role before we upgrade, so we can check that its new domain_id
- # attribute is handled correctly
- role_id = uuid.uuid4().hex
- self.insert_dict(session, 'role',
- {'id': role_id, 'name': uuid.uuid4().hex})
- session.close()
-
- self.upgrade(88)
-
- session = self.Session()
- self.metadata.clear()
- self.assertTableColumns('role', ['id', 'name', 'domain_id', 'extra'])
- # Check the domain_id has been added to the uniqueness constraint
- inspector = reflection.Inspector.from_engine(self.engine)
- constraints = inspector.get_unique_constraints('role')
- constraint_columns = [
- constraint['column_names'] for constraint in constraints
- if constraint['name'] == 'ixu_role_name_domain_id']
- self.assertIn('domain_id', constraint_columns[0])
-
- # Now check our role has its domain_id attribute set correctly
- role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
- cols = [role_table.c.domain_id]
- filter = role_table.c.id == role_id
- statement = sqlalchemy.select(cols).where(filter)
- role_entry = session.execute(statement).fetchone()
- self.assertEqual(NULL_DOMAIN_ID, role_entry[0])
-
- def test_add_root_of_all_domains(self):
- NULL_DOMAIN_ID = '<<keystone.domain.root>>'
- self.upgrade(89)
- session = self.Session()
-
- domain_table = sqlalchemy.Table(
- 'domain', self.metadata, autoload=True)
- query = session.query(domain_table).filter_by(id=NULL_DOMAIN_ID)
- domain_from_db = query.one()
- self.assertIn(NULL_DOMAIN_ID, domain_from_db)
-
- project_table = sqlalchemy.Table(
- 'project', self.metadata, autoload=True)
- query = session.query(project_table).filter_by(id=NULL_DOMAIN_ID)
- project_from_db = query.one()
- self.assertIn(NULL_DOMAIN_ID, project_from_db)
-
- session.close()
-
- def test_add_local_user_and_password_tables(self):
- local_user_table = 'local_user'
- password_table = 'password'
- self.upgrade(89)
- self.assertTableDoesNotExist(local_user_table)
- self.assertTableDoesNotExist(password_table)
- self.upgrade(90)
- self.assertTableColumns(local_user_table,
- ['id',
- 'user_id',
- 'domain_id',
- 'name'])
- self.assertTableColumns(password_table,
- ['id',
- 'local_user_id',
- 'password'])
-
- def test_migrate_data_to_local_user_and_password_tables(self):
- def get_expected_users():
- expected_users = []
- for test_user in default_fixtures.USERS:
- user = {}
- user['id'] = uuid.uuid4().hex
- user['name'] = test_user['name']
- user['domain_id'] = test_user['domain_id']
- user['password'] = test_user['password']
- user['enabled'] = True
- user['extra'] = json.dumps(uuid.uuid4().hex)
- user['default_project_id'] = uuid.uuid4().hex
- expected_users.append(user)
- return expected_users
-
- def add_users_to_db(expected_users, user_table):
- for user in expected_users:
- ins = user_table.insert().values(
- {'id': user['id'],
- 'name': user['name'],
- 'domain_id': user['domain_id'],
- 'password': user['password'],
- 'enabled': user['enabled'],
- 'extra': user['extra'],
- 'default_project_id': user['default_project_id']})
- ins.execute()
-
- def get_users_from_db(user_table, local_user_table, password_table):
- sel = (
- sqlalchemy.select([user_table.c.id,
- user_table.c.enabled,
- user_table.c.extra,
- user_table.c.default_project_id,
- local_user_table.c.name,
- local_user_table.c.domain_id,
- password_table.c.password])
- .select_from(user_table.join(local_user_table,
- user_table.c.id ==
- local_user_table.c.user_id)
- .join(password_table,
- local_user_table.c.id ==
- password_table.c.local_user_id))
- )
- user_rows = sel.execute()
- users = []
- for row in user_rows:
- users.append(
- {'id': row['id'],
- 'name': row['name'],
- 'domain_id': row['domain_id'],
- 'password': row['password'],
- 'enabled': row['enabled'],
- 'extra': row['extra'],
- 'default_project_id': row['default_project_id']})
- return users
-
- meta = sqlalchemy.MetaData()
- meta.bind = self.engine
-
- user_table_name = 'user'
- local_user_table_name = 'local_user'
- password_table_name = 'password'
-
- # populate current user table
- self.upgrade(90)
- user_table = sqlalchemy.Table(user_table_name, meta, autoload=True)
- expected_users = get_expected_users()
- add_users_to_db(expected_users, user_table)
-
- # upgrade to migration and test
- self.upgrade(91)
- self.assertTableCountsMatch(user_table_name, local_user_table_name)
- self.assertTableCountsMatch(local_user_table_name, password_table_name)
- meta.clear()
- user_table = sqlalchemy.Table(user_table_name, meta, autoload=True)
- local_user_table = sqlalchemy.Table(local_user_table_name, meta,
- autoload=True)
- password_table = sqlalchemy.Table(password_table_name, meta,
- autoload=True)
- actual_users = get_users_from_db(user_table, local_user_table,
- password_table)
- self.assertListEqual(expected_users, actual_users)
-
- def test_migrate_user_with_null_password_to_password_tables(self):
- USER_TABLE_NAME = 'user'
- LOCAL_USER_TABLE_NAME = 'local_user'
- PASSWORD_TABLE_NAME = 'password'
- self.upgrade(90)
- user_ref = unit.new_user_ref(uuid.uuid4().hex)
- user_ref.pop('password')
- # pop extra attribute which doesn't recognized by SQL expression
- # layer.
- user_ref.pop('email')
- session = self.Session()
- self.insert_dict(session, USER_TABLE_NAME, user_ref)
- self.metadata.clear()
- self.upgrade(91)
- # migration should be successful.
- self.assertTableCountsMatch(USER_TABLE_NAME, LOCAL_USER_TABLE_NAME)
- # no new entry was added to the password table because the
- # user doesn't have a password.
- password_table = self.select_table(PASSWORD_TABLE_NAME)
- rows = session.execute(password_table.count()).scalar()
- self.assertEqual(0, rows)
-
- def test_migrate_user_skip_user_already_exist_in_local_user(self):
- USER_TABLE_NAME = 'user'
- LOCAL_USER_TABLE_NAME = 'local_user'
- self.upgrade(90)
- user1_ref = unit.new_user_ref(uuid.uuid4().hex)
- # pop extra attribute which doesn't recognized by SQL expression
- # layer.
- user1_ref.pop('email')
- user2_ref = unit.new_user_ref(uuid.uuid4().hex)
- user2_ref.pop('email')
- session = self.Session()
- self.insert_dict(session, USER_TABLE_NAME, user1_ref)
- self.insert_dict(session, USER_TABLE_NAME, user2_ref)
- user_id = user1_ref.pop('id')
- user_name = user1_ref.pop('name')
- domain_id = user1_ref.pop('domain_id')
- local_user_ref = {'user_id': user_id, 'name': user_name,
- 'domain_id': domain_id}
- self.insert_dict(session, LOCAL_USER_TABLE_NAME, local_user_ref)
- self.metadata.clear()
- self.upgrade(91)
- # migration should be successful and user2_ref has been migrated to
- # `local_user` table.
- self.assertTableCountsMatch(USER_TABLE_NAME, LOCAL_USER_TABLE_NAME)
-
- def test_implied_roles_fk_on_delete_cascade(self):
- if self.engine.name == 'sqlite':
- self.skipTest('sqlite backend does not support foreign keys')
-
- self.upgrade(92)
-
- def _create_three_roles():
- id_list = []
- for _ in range(3):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- id_list.append(role['id'])
- return id_list
-
- role_id_list = _create_three_roles()
- self.role_api.create_implied_role(role_id_list[0], role_id_list[1])
- self.role_api.create_implied_role(role_id_list[0], role_id_list[2])
-
- # assert that there are two roles implied by role 0.
- implied_roles = self.role_api.list_implied_roles(role_id_list[0])
- self.assertThat(implied_roles, matchers.HasLength(2))
-
- self.role_api.delete_role(role_id_list[0])
- # assert the cascade deletion is effective.
- implied_roles = self.role_api.list_implied_roles(role_id_list[0])
- self.assertThat(implied_roles, matchers.HasLength(0))
-
- def test_domain_as_project_upgrade(self):
-
- def _populate_domain_and_project_tables(session):
- # Three domains, with various different attributes
- self.domains = [{'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'enabled': True,
- 'extra': {'description': uuid.uuid4().hex,
- 'another_attribute': True}},
- {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'enabled': True,
- 'extra': {'description': uuid.uuid4().hex}},
- {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'enabled': False}]
- # Four projects, two top level, two children
- self.projects = []
- self.projects.append(unit.new_project_ref(
- domain_id=self.domains[0]['id'],
- parent_id=None))
- self.projects.append(unit.new_project_ref(
- domain_id=self.domains[0]['id'],
- parent_id=self.projects[0]['id']))
- self.projects.append(unit.new_project_ref(
- domain_id=self.domains[1]['id'],
- parent_id=None))
- self.projects.append(unit.new_project_ref(
- domain_id=self.domains[1]['id'],
- parent_id=self.projects[2]['id']))
-
- for domain in self.domains:
- this_domain = domain.copy()
- if 'extra' in this_domain:
- this_domain['extra'] = json.dumps(this_domain['extra'])
- self.insert_dict(session, 'domain', this_domain)
- for project in self.projects:
- self.insert_dict(session, 'project', project)
-
- def _check_projects(projects):
-
- def _assert_domain_matches_project(project):
- for domain in self.domains:
- if project.id == domain['id']:
- self.assertEqual(domain['name'], project.name)
- self.assertEqual(domain['enabled'], project.enabled)
- if domain['id'] == self.domains[0]['id']:
- self.assertEqual(domain['extra']['description'],
- project.description)
- self.assertEqual({'another_attribute': True},
- json.loads(project.extra))
- elif domain['id'] == self.domains[1]['id']:
- self.assertEqual(domain['extra']['description'],
- project.description)
- self.assertEqual({}, json.loads(project.extra))
-
- # We had domains 3 we created, which should now be projects acting
- # as domains, To this we add the 4 original projects, plus the root
- # of all domains row.
- self.assertEqual(8, projects.count())
-
- project_ids = []
- for project in projects:
- if project.is_domain:
- self.assertEqual(NULL_DOMAIN_ID, project.domain_id)
- self.assertIsNone(project.parent_id)
- else:
- self.assertIsNotNone(project.domain_id)
- self.assertIsNotNone(project.parent_id)
- project_ids.append(project.id)
-
- for domain in self.domains:
- self.assertIn(domain['id'], project_ids)
- for project in self.projects:
- self.assertIn(project['id'], project_ids)
-
- # Now check the attributes of the domains came across OK
- for project in projects:
- _assert_domain_matches_project(project)
-
- NULL_DOMAIN_ID = '<<keystone.domain.root>>'
- self.upgrade(92)
-
- session = self.Session()
-
- _populate_domain_and_project_tables(session)
-
- self.upgrade(93)
- proj_table = sqlalchemy.Table('project', self.metadata, autoload=True)
-
- projects = session.query(proj_table)
- _check_projects(projects)
-
- def test_add_federated_user_table(self):
- federated_user_table = 'federated_user'
- self.upgrade(93)
- self.assertTableDoesNotExist(federated_user_table)
- self.upgrade(94)
- self.assertTableColumns(federated_user_table,
- ['id',
- 'user_id',
- 'idp_id',
- 'protocol_id',
- 'unique_id',
- 'display_name'])
-
- def test_add_int_pkey_to_revocation_event_table(self):
- meta = sqlalchemy.MetaData()
- meta.bind = self.engine
- REVOCATION_EVENT_TABLE_NAME = 'revocation_event'
- self.upgrade(94)
- revocation_event_table = sqlalchemy.Table(REVOCATION_EVENT_TABLE_NAME,
- meta, autoload=True)
- # assert id column is a string (before)
- self.assertEqual('VARCHAR(64)', str(revocation_event_table.c.id.type))
- self.upgrade(95)
- meta.clear()
- revocation_event_table = sqlalchemy.Table(REVOCATION_EVENT_TABLE_NAME,
- meta, autoload=True)
- # assert id column is an integer (after)
- self.assertEqual('INTEGER', str(revocation_event_table.c.id.type))
-
- def _add_unique_constraint_to_role_name(self,
- constraint_name='ixu_role_name'):
- meta = sqlalchemy.MetaData()
- meta.bind = self.engine
- role_table = sqlalchemy.Table('role', meta, autoload=True)
- migrate.UniqueConstraint(role_table.c.name,
- name=constraint_name).create()
-
- def _drop_unique_constraint_to_role_name(self,
- constraint_name='ixu_role_name'):
- role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
- migrate.UniqueConstraint(role_table.c.name,
- name=constraint_name).drop()
-
- def test_migration_88_drops_unique_constraint(self):
- self.upgrade(87)
- if self.engine.name == 'mysql':
- self.assertTrue(self.does_index_exist('role', 'ixu_role_name'))
- else:
- self.assertTrue(self.does_constraint_exist('role',
- 'ixu_role_name'))
- self.upgrade(88)
- if self.engine.name == 'mysql':
- self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
- else:
- self.assertFalse(self.does_constraint_exist('role',
- 'ixu_role_name'))
-
- def test_migration_88_inconsistent_constraint_name(self):
- self.upgrade(87)
- self._drop_unique_constraint_to_role_name()
-
- constraint_name = uuid.uuid4().hex
- self._add_unique_constraint_to_role_name(
- constraint_name=constraint_name)
-
- if self.engine.name == 'mysql':
- self.assertTrue(self.does_index_exist('role', constraint_name))
- self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
- else:
- self.assertTrue(self.does_constraint_exist('role',
- constraint_name))
- self.assertFalse(self.does_constraint_exist('role',
- 'ixu_role_name'))
-
- self.upgrade(88)
- if self.engine.name == 'mysql':
- self.assertFalse(self.does_index_exist('role', constraint_name))
- self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
- else:
- self.assertFalse(self.does_constraint_exist('role',
- constraint_name))
- self.assertFalse(self.does_constraint_exist('role',
- 'ixu_role_name'))
-
- def test_migration_96(self):
- self.upgrade(95)
- if self.engine.name == 'mysql':
- self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
- else:
- self.assertFalse(self.does_constraint_exist('role',
- 'ixu_role_name'))
-
- self.upgrade(96)
- if self.engine.name == 'mysql':
- self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
- else:
- self.assertFalse(self.does_constraint_exist('role',
- 'ixu_role_name'))
-
- def test_migration_96_constraint_exists(self):
- self.upgrade(95)
- self._add_unique_constraint_to_role_name()
-
- if self.engine.name == 'mysql':
- self.assertTrue(self.does_index_exist('role', 'ixu_role_name'))
- else:
- self.assertTrue(self.does_constraint_exist('role',
- 'ixu_role_name'))
-
- self.upgrade(96)
- if self.engine.name == 'mysql':
- self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
- else:
- self.assertFalse(self.does_constraint_exist('role',
- 'ixu_role_name'))
-
-
-class VersionTests(SqlMigrateBase):
-
- _initial_db_version = migration_helpers.get_init_version()
-
- def test_core_initial(self):
- """Get the version before migrated, it's the initial DB version."""
- version = migration_helpers.get_db_version()
- self.assertEqual(self._initial_db_version, version)
-
- def test_core_max(self):
- """When get the version after upgrading, it's the new version."""
- self.upgrade(self.max_version)
- version = migration_helpers.get_db_version()
- self.assertEqual(self.max_version, version)
-
- def test_assert_not_schema_downgrade(self):
- self.upgrade(self.max_version)
- self.assertRaises(
- db_exception.DbMigrationError,
- migration_helpers._sync_common_repo,
- self.max_version - 1)
-
- def test_extension_not_controlled(self):
- """When get the version before controlling, raises DbMigrationError."""
- self.assertRaises(db_exception.DbMigrationError,
- migration_helpers.get_db_version,
- extension='federation')
-
- def test_unexpected_extension(self):
- """The version for a non-existent extension raises ImportError."""
- extension_name = uuid.uuid4().hex
- self.assertRaises(ImportError,
- migration_helpers.get_db_version,
- extension=extension_name)
-
- def test_unversioned_extension(self):
- """The version for extensions without migrations raise an exception."""
- self.assertRaises(exception.MigrationNotProvided,
- migration_helpers.get_db_version,
- extension='admin_crud')
diff --git a/keystone-moon/keystone/tests/unit/test_ssl.py b/keystone-moon/keystone/tests/unit/test_ssl.py
deleted file mode 100644
index 6a6d9ffb..00000000
--- a/keystone-moon/keystone/tests/unit/test_ssl.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import ssl
-
-from oslo_config import cfg
-
-from keystone.common import environment
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import appserver
-
-
-CONF = cfg.CONF
-
-CERTDIR = unit.dirs.root('examples', 'pki', 'certs')
-KEYDIR = unit.dirs.root('examples', 'pki', 'private')
-CERT = os.path.join(CERTDIR, 'ssl_cert.pem')
-KEY = os.path.join(KEYDIR, 'ssl_key.pem')
-CA = os.path.join(CERTDIR, 'cacert.pem')
-CLIENT = os.path.join(CERTDIR, 'middleware.pem')
-
-
-class SSLTestCase(unit.TestCase):
- def setUp(self):
- super(SSLTestCase, self).setUp()
- raise self.skipTest('SSL Version and Ciphers cannot be configured '
- 'with eventlet, some platforms have disabled '
- 'SSLv3. See bug 1381365.')
- # NOTE(morganfainberg): It has been determined that this
- # will not be fixed. These tests should be re-enabled for the full
- # functional test suite when run against an SSL terminated
- # endpoint. Some distributions/environments have patched OpenSSL to
- # not have SSLv3 at all due to POODLE and this causes differing
- # behavior depending on platform. See bug 1381365 for more information.
-
- # NOTE(jamespage):
- # Deal with more secure certificate chain verification
- # introduced in python 2.7.9 under PEP-0476
- # https://github.com/python/peps/blob/master/pep-0476.txt
- self.context = None
- if hasattr(ssl, '_create_unverified_context'):
- self.context = ssl._create_unverified_context()
- self.load_backends()
-
- def get_HTTPSConnection(self, *args):
- """Simple helper to configure HTTPSConnection objects."""
- if self.context:
- return environment.httplib.HTTPSConnection(
- *args,
- context=self.context
- )
- else:
- return environment.httplib.HTTPSConnection(*args)
-
- def test_1way_ssl_ok(self):
- """Make sure both public and admin API work with 1-way SSL."""
- paste_conf = self._paste_config('keystone')
- ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA)
-
- # Verify Admin
- with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '127.0.0.1', CONF.eventlet_server.admin_port)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
-
- # Verify Public
- with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '127.0.0.1', CONF.eventlet_server.public_port)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
-
- def test_2way_ssl_ok(self):
- """Make sure both public and admin API work with 2-way SSL.
-
- Requires client certificate.
- """
- paste_conf = self._paste_config('keystone')
- ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, cert_required=True)
-
- # Verify Admin
- with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '127.0.0.1', CONF.eventlet_server.admin_port, CLIENT, CLIENT)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
-
- # Verify Public
- with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '127.0.0.1', CONF.eventlet_server.public_port, CLIENT, CLIENT)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
-
- def test_1way_ssl_with_ipv6_ok(self):
- """Make sure both public and admin API work with 1-way ipv6 & SSL."""
- self.skip_if_no_ipv6()
-
- paste_conf = self._paste_config('keystone')
- ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, host="::1")
-
- # Verify Admin
- with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '::1', CONF.eventlet_server.admin_port)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
-
- # Verify Public
- with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '::1', CONF.eventlet_server.public_port)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
-
- def test_2way_ssl_with_ipv6_ok(self):
- """Make sure both public and admin API work with 2-way ipv6 & SSL.
-
- Requires client certificate.
- """
- self.skip_if_no_ipv6()
-
- paste_conf = self._paste_config('keystone')
- ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA,
- cert_required=True, host="::1")
-
- # Verify Admin
- with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '::1', CONF.eventlet_server.admin_port, CLIENT, CLIENT)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
-
- # Verify Public
- with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '::1', CONF.eventlet_server.public_port, CLIENT, CLIENT)
- conn.request('GET', '/')
- resp = conn.getresponse()
- self.assertEqual(300, resp.status)
-
- def test_2way_ssl_fail(self):
- """Expect to fail when client does not present proper certificate."""
- paste_conf = self._paste_config('keystone')
- ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, cert_required=True)
-
- # Verify Admin
- with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '127.0.0.1', CONF.eventlet_server.admin_port)
- try:
- conn.request('GET', '/')
- self.fail('Admin API shoulda failed with SSL handshake!')
- except ssl.SSLError:
- pass
-
- # Verify Public
- with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs):
- conn = self.get_HTTPSConnection(
- '127.0.0.1', CONF.eventlet_server.public_port)
- try:
- conn.request('GET', '/')
- self.fail('Public API shoulda failed with SSL handshake!')
- except ssl.SSLError:
- pass
diff --git a/keystone-moon/keystone/tests/unit/test_token_bind.py b/keystone-moon/keystone/tests/unit/test_token_bind.py
deleted file mode 100644
index ee4d011a..00000000
--- a/keystone-moon/keystone/tests/unit/test_token_bind.py
+++ /dev/null
@@ -1,198 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-from keystone.common import wsgi
-from keystone import exception
-from keystone.models import token_model
-from keystone.tests import unit
-from keystone.tests.unit import test_token_provider
-
-
-KERBEROS_BIND = 'USER@REALM'
-ANY = 'any'
-
-
-class BindTest(unit.TestCase):
- """Test binding tokens to a Principal.
-
- Even though everything in this file references kerberos the same concepts
- will apply to all future binding mechanisms.
- """
-
- def setUp(self):
- super(BindTest, self).setUp()
- self.TOKEN_BIND_KERB = copy.deepcopy(
- test_token_provider.SAMPLE_V3_TOKEN)
- self.TOKEN_BIND_KERB['token']['bind'] = {'kerberos': KERBEROS_BIND}
- self.TOKEN_BIND_UNKNOWN = copy.deepcopy(
- test_token_provider.SAMPLE_V3_TOKEN)
- self.TOKEN_BIND_UNKNOWN['token']['bind'] = {'FOO': 'BAR'}
- self.TOKEN_BIND_NONE = copy.deepcopy(
- test_token_provider.SAMPLE_V3_TOKEN)
-
- self.ALL_TOKENS = [self.TOKEN_BIND_KERB, self.TOKEN_BIND_UNKNOWN,
- self.TOKEN_BIND_NONE]
-
- def assert_kerberos_bind(self, tokens, bind_level,
- use_kerberos=True, success=True):
- if not isinstance(tokens, dict):
- for token in tokens:
- self.assert_kerberos_bind(token, bind_level,
- use_kerberos=use_kerberos,
- success=success)
- elif use_kerberos == ANY:
- for val in (True, False):
- self.assert_kerberos_bind(tokens, bind_level,
- use_kerberos=val, success=success)
- else:
- context = {'environment': {}}
- self.config_fixture.config(group='token',
- enforce_token_bind=bind_level)
-
- if use_kerberos:
- context['environment']['REMOTE_USER'] = KERBEROS_BIND
- context['environment']['AUTH_TYPE'] = 'Negotiate'
-
- # NOTE(morganfainberg): This assumes a V3 token.
- token_ref = token_model.KeystoneToken(
- token_id=uuid.uuid4().hex,
- token_data=tokens)
-
- if not success:
- self.assertRaises(exception.Unauthorized,
- wsgi.validate_token_bind,
- context, token_ref)
- else:
- wsgi.validate_token_bind(context, token_ref)
-
- # DISABLED
-
- def test_bind_disabled_with_kerb_user(self):
- self.assert_kerberos_bind(self.ALL_TOKENS,
- bind_level='disabled',
- use_kerberos=ANY,
- success=True)
-
- # PERMISSIVE
-
- def test_bind_permissive_with_kerb_user(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
- bind_level='permissive',
- use_kerberos=True,
- success=True)
-
- def test_bind_permissive_with_regular_token(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_NONE,
- bind_level='permissive',
- use_kerberos=ANY,
- success=True)
-
- def test_bind_permissive_without_kerb_user(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
- bind_level='permissive',
- use_kerberos=False,
- success=False)
-
- def test_bind_permissive_with_unknown_bind(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN,
- bind_level='permissive',
- use_kerberos=ANY,
- success=True)
-
- # STRICT
-
- def test_bind_strict_with_regular_token(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_NONE,
- bind_level='strict',
- use_kerberos=ANY,
- success=True)
-
- def test_bind_strict_with_kerb_user(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
- bind_level='strict',
- use_kerberos=True,
- success=True)
-
- def test_bind_strict_without_kerb_user(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
- bind_level='strict',
- use_kerberos=False,
- success=False)
-
- def test_bind_strict_with_unknown_bind(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN,
- bind_level='strict',
- use_kerberos=ANY,
- success=False)
-
- # REQUIRED
-
- def test_bind_required_with_regular_token(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_NONE,
- bind_level='required',
- use_kerberos=ANY,
- success=False)
-
- def test_bind_required_with_kerb_user(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
- bind_level='required',
- use_kerberos=True,
- success=True)
-
- def test_bind_required_without_kerb_user(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
- bind_level='required',
- use_kerberos=False,
- success=False)
-
- def test_bind_required_with_unknown_bind(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN,
- bind_level='required',
- use_kerberos=ANY,
- success=False)
-
- # NAMED
-
- def test_bind_named_with_regular_token(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_NONE,
- bind_level='kerberos',
- use_kerberos=ANY,
- success=False)
-
- def test_bind_named_with_kerb_user(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
- bind_level='kerberos',
- use_kerberos=True,
- success=True)
-
- def test_bind_named_without_kerb_user(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_KERB,
- bind_level='kerberos',
- use_kerberos=False,
- success=False)
-
- def test_bind_named_with_unknown_bind(self):
- self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN,
- bind_level='kerberos',
- use_kerberos=ANY,
- success=False)
-
- def test_bind_named_with_unknown_scheme(self):
- self.assert_kerberos_bind(self.ALL_TOKENS,
- bind_level='unknown',
- use_kerberos=ANY,
- success=False)
diff --git a/keystone-moon/keystone/tests/unit/test_token_provider.py b/keystone-moon/keystone/tests/unit/test_token_provider.py
deleted file mode 100644
index 5c71363b..00000000
--- a/keystone-moon/keystone/tests/unit/test_token_provider.py
+++ /dev/null
@@ -1,845 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import datetime
-
-from oslo_config import cfg
-from oslo_utils import timeutils
-from six.moves import reload_module
-
-from keystone.common import dependency
-from keystone.common import utils
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-from keystone import token
-from keystone.token.providers import fernet
-from keystone.token.providers import pki
-from keystone.token.providers import pkiz
-from keystone.token.providers import uuid
-
-
-CONF = cfg.CONF
-
-FUTURE_DELTA = datetime.timedelta(seconds=CONF.token.expiration)
-CURRENT_DATE = timeutils.utcnow()
-
-SAMPLE_V2_TOKEN = {
- "access": {
- "trust": {
- "id": "abc123",
- "trustee_user_id": "123456",
- "trustor_user_id": "333333",
- "impersonation": False
- },
- "serviceCatalog": [
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:8774/v1.1/01257",
- "id": "51934fe63a5b4ac0a32664f64eb462c3",
- "internalURL": "http://localhost:8774/v1.1/01257",
- "publicURL": "http://localhost:8774/v1.1/01257",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "nova",
- "type": "compute"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:9292",
- "id": "aaa17a539e364297a7845d67c7c7cc4b",
- "internalURL": "http://localhost:9292",
- "publicURL": "http://localhost:9292",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "glance",
- "type": "image"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:8776/v1/01257",
- "id": "077d82df25304abeac2294004441db5a",
- "internalURL": "http://localhost:8776/v1/01257",
- "publicURL": "http://localhost:8776/v1/01257",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "volume",
- "type": "volume"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:8773/services/Admin",
- "id": "b06997fd08414903ad458836efaa9067",
- "internalURL": "http://localhost:8773/services/Cloud",
- "publicURL": "http://localhost:8773/services/Cloud",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "ec2",
- "type": "ec2"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:8080/v1",
- "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
- "internalURL": "http://localhost:8080/v1/AUTH_01257",
- "publicURL": "http://localhost:8080/v1/AUTH_01257",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "swift",
- "type": "object-store"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:35357/v2.0",
- "id": "02850c5d1d094887bdc46e81e1e15dc7",
- "internalURL": "http://localhost:5000/v2.0",
- "publicURL": "http://localhost:5000/v2.0",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "keystone",
- "type": "identity"
- }
- ],
- "token": {
- "expires": "2013-05-22T00:02:43.941430Z",
- "id": "ce4fc2d36eea4cc9a36e666ac2f1029a",
- "issued_at": "2013-05-21T00:02:43.941473Z",
- "tenant": {
- "enabled": True,
- "id": "01257",
- "name": "service"
- }
- },
- "user": {
- "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
- "name": "nova",
- "roles": [
- {
- "name": "_member_"
- },
- {
- "name": "admin"
- }
- ],
- "roles_links": [],
- "username": "nova"
- }
- }
-}
-
-SAMPLE_V3_TOKEN = {
- "token": {
- "catalog": [
- {
- "endpoints": [
- {
- "id": "02850c5d1d094887bdc46e81e1e15dc7",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:35357/v2.0"
- },
- {
- "id": "446e244b75034a9ab4b0811e82d0b7c8",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:5000/v2.0"
- },
- {
- "id": "47fa3d9f499240abb5dfcf2668f168cd",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:5000/v2.0"
- }
- ],
- "id": "26d7541715a44a4d9adad96f9872b633",
- "type": "identity",
- },
- {
- "endpoints": [
- {
- "id": "aaa17a539e364297a7845d67c7c7cc4b",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:9292"
- },
- {
- "id": "4fa9620e42394cb1974736dce0856c71",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:9292"
- },
- {
- "id": "9673687f9bc441d88dec37942bfd603b",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:9292"
- }
- ],
- "id": "d27a41843f4e4b0e8cf6dac4082deb0d",
- "type": "image",
- },
- {
- "endpoints": [
- {
- "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:8080/v1"
- },
- {
- "id": "43bef154594d4ccb8e49014d20624e1d",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:8080/v1/AUTH_01257"
- },
- {
- "id": "e63b5f5d7aa3493690189d0ff843b9b3",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:8080/v1/AUTH_01257"
- }
- ],
- "id": "a669e152f1104810a4b6701aade721bb",
- "type": "object-store",
- },
- {
- "endpoints": [
- {
- "id": "51934fe63a5b4ac0a32664f64eb462c3",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:8774/v1.1/01257"
- },
- {
- "id": "869b535eea0d42e483ae9da0d868ebad",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:8774/v1.1/01257"
- },
- {
- "id": "93583824c18f4263a2245ca432b132a6",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:8774/v1.1/01257"
- }
- ],
- "id": "7f32cc2af6c9476e82d75f80e8b3bbb8",
- "type": "compute",
- },
- {
- "endpoints": [
- {
- "id": "b06997fd08414903ad458836efaa9067",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:8773/services/Admin"
- },
- {
- "id": "411f7de7c9a8484c9b46c254fb2676e2",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:8773/services/Cloud"
- },
- {
- "id": "f21c93f3da014785854b4126d0109c49",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:8773/services/Cloud"
- }
- ],
- "id": "b08c9c7d4ef543eba5eeb766f72e5aa1",
- "type": "ec2",
- },
- {
- "endpoints": [
- {
- "id": "077d82df25304abeac2294004441db5a",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:8776/v1/01257"
- },
- {
- "id": "875bf282362c40219665278b4fd11467",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:8776/v1/01257"
- },
- {
- "id": "cd229aa6df0640dc858a8026eb7e640c",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:8776/v1/01257"
- }
- ],
- "id": "5db21b82617f4a95816064736a7bec22",
- "type": "volume",
- }
- ],
- "expires_at": "2013-05-22T00:02:43.941430Z",
- "issued_at": "2013-05-21T00:02:43.941473Z",
- "methods": [
- "password"
- ],
- "project": {
- "domain": {
- "id": "default",
- "name": "Default"
- },
- "id": "01257",
- "name": "service"
- },
- "roles": [
- {
- "id": "9fe2ff9ee4384b1894a90878d3e92bab",
- "name": "_member_"
- },
- {
- "id": "53bff13443bd4450b97f978881d47b18",
- "name": "admin"
- }
- ],
- "user": {
- "domain": {
- "id": "default",
- "name": "Default"
- },
- "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
- "name": "nova"
- },
- "OS-TRUST:trust": {
- "id": "abc123",
- "trustee_user_id": "123456",
- "trustor_user_id": "333333",
- "impersonation": False
- }
- }
-}
-
-SAMPLE_V2_TOKEN_WITH_EMBEDED_VERSION = {
- "access": {
- "trust": {
- "id": "abc123",
- "trustee_user_id": "123456",
- "trustor_user_id": "333333",
- "impersonation": False
- },
- "serviceCatalog": [
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:8774/v1.1/01257",
- "id": "51934fe63a5b4ac0a32664f64eb462c3",
- "internalURL": "http://localhost:8774/v1.1/01257",
- "publicURL": "http://localhost:8774/v1.1/01257",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "nova",
- "type": "compute"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:9292",
- "id": "aaa17a539e364297a7845d67c7c7cc4b",
- "internalURL": "http://localhost:9292",
- "publicURL": "http://localhost:9292",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "glance",
- "type": "image"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:8776/v1/01257",
- "id": "077d82df25304abeac2294004441db5a",
- "internalURL": "http://localhost:8776/v1/01257",
- "publicURL": "http://localhost:8776/v1/01257",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "volume",
- "type": "volume"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:8773/services/Admin",
- "id": "b06997fd08414903ad458836efaa9067",
- "internalURL": "http://localhost:8773/services/Cloud",
- "publicURL": "http://localhost:8773/services/Cloud",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "ec2",
- "type": "ec2"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:8080/v1",
- "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
- "internalURL": "http://localhost:8080/v1/AUTH_01257",
- "publicURL": "http://localhost:8080/v1/AUTH_01257",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "swift",
- "type": "object-store"
- },
- {
- "endpoints": [
- {
- "adminURL": "http://localhost:35357/v2.0",
- "id": "02850c5d1d094887bdc46e81e1e15dc7",
- "internalURL": "http://localhost:5000/v2.0",
- "publicURL": "http://localhost:5000/v2.0",
- "region": "RegionOne"
- }
- ],
- "endpoints_links": [],
- "name": "keystone",
- "type": "identity"
- }
- ],
- "token": {
- "expires": "2013-05-22T00:02:43.941430Z",
- "id": "ce4fc2d36eea4cc9a36e666ac2f1029a",
- "issued_at": "2013-05-21T00:02:43.941473Z",
- "tenant": {
- "enabled": True,
- "id": "01257",
- "name": "service"
- }
- },
- "user": {
- "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
- "name": "nova",
- "roles": [
- {
- "name": "_member_"
- },
- {
- "name": "admin"
- }
- ],
- "roles_links": [],
- "username": "nova"
- }
- },
- 'token_version': 'v2.0'
-}
-SAMPLE_V3_TOKEN_WITH_EMBEDED_VERSION = {
- "token": {
- "catalog": [
- {
- "endpoints": [
- {
- "id": "02850c5d1d094887bdc46e81e1e15dc7",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:35357/v2.0"
- },
- {
- "id": "446e244b75034a9ab4b0811e82d0b7c8",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:5000/v2.0"
- },
- {
- "id": "47fa3d9f499240abb5dfcf2668f168cd",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:5000/v2.0"
- }
- ],
- "id": "26d7541715a44a4d9adad96f9872b633",
- "type": "identity",
- },
- {
- "endpoints": [
- {
- "id": "aaa17a539e364297a7845d67c7c7cc4b",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:9292"
- },
- {
- "id": "4fa9620e42394cb1974736dce0856c71",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:9292"
- },
- {
- "id": "9673687f9bc441d88dec37942bfd603b",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:9292"
- }
- ],
- "id": "d27a41843f4e4b0e8cf6dac4082deb0d",
- "type": "image",
- },
- {
- "endpoints": [
- {
- "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:8080/v1"
- },
- {
- "id": "43bef154594d4ccb8e49014d20624e1d",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:8080/v1/AUTH_01257"
- },
- {
- "id": "e63b5f5d7aa3493690189d0ff843b9b3",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:8080/v1/AUTH_01257"
- }
- ],
- "id": "a669e152f1104810a4b6701aade721bb",
- "type": "object-store",
- },
- {
- "endpoints": [
- {
- "id": "51934fe63a5b4ac0a32664f64eb462c3",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:8774/v1.1/01257"
- },
- {
- "id": "869b535eea0d42e483ae9da0d868ebad",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:8774/v1.1/01257"
- },
- {
- "id": "93583824c18f4263a2245ca432b132a6",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:8774/v1.1/01257"
- }
- ],
- "id": "7f32cc2af6c9476e82d75f80e8b3bbb8",
- "type": "compute",
- },
- {
- "endpoints": [
- {
- "id": "b06997fd08414903ad458836efaa9067",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:8773/services/Admin"
- },
- {
- "id": "411f7de7c9a8484c9b46c254fb2676e2",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:8773/services/Cloud"
- },
- {
- "id": "f21c93f3da014785854b4126d0109c49",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:8773/services/Cloud"
- }
- ],
- "id": "b08c9c7d4ef543eba5eeb766f72e5aa1",
- "type": "ec2",
- },
- {
- "endpoints": [
- {
- "id": "077d82df25304abeac2294004441db5a",
- "interface": "admin",
- "region": "RegionOne",
- "url": "http://localhost:8776/v1/01257"
- },
- {
- "id": "875bf282362c40219665278b4fd11467",
- "interface": "internal",
- "region": "RegionOne",
- "url": "http://localhost:8776/v1/01257"
- },
- {
- "id": "cd229aa6df0640dc858a8026eb7e640c",
- "interface": "public",
- "region": "RegionOne",
- "url": "http://localhost:8776/v1/01257"
- }
- ],
- "id": "5db21b82617f4a95816064736a7bec22",
- "type": "volume",
- }
- ],
- "expires_at": "2013-05-22T00:02:43.941430Z",
- "issued_at": "2013-05-21T00:02:43.941473Z",
- "methods": [
- "password"
- ],
- "project": {
- "domain": {
- "id": "default",
- "name": "Default"
- },
- "id": "01257",
- "name": "service"
- },
- "roles": [
- {
- "id": "9fe2ff9ee4384b1894a90878d3e92bab",
- "name": "_member_"
- },
- {
- "id": "53bff13443bd4450b97f978881d47b18",
- "name": "admin"
- }
- ],
- "user": {
- "domain": {
- "id": "default",
- "name": "Default"
- },
- "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce",
- "name": "nova"
- },
- "OS-TRUST:trust": {
- "id": "abc123",
- "trustee_user_id": "123456",
- "trustor_user_id": "333333",
- "impersonation": False
- }
- },
- 'token_version': 'v3.0'
-}
-
-
-def create_v2_token():
- return {
- "access": {
- "token": {
- "expires": utils.isotime(timeutils.utcnow() +
- FUTURE_DELTA),
- "issued_at": "2013-05-21T00:02:43.941473Z",
- "tenant": {
- "enabled": True,
- "id": "01257",
- "name": "service"
- }
- }
- }
- }
-
-
-SAMPLE_V2_TOKEN_EXPIRED = {
- "access": {
- "token": {
- "expires": utils.isotime(CURRENT_DATE),
- "issued_at": "2013-05-21T00:02:43.941473Z",
- "tenant": {
- "enabled": True,
- "id": "01257",
- "name": "service"
- }
- }
- }
-}
-
-
-def create_v3_token():
- return {
- "token": {
- 'methods': [],
- "expires_at": utils.isotime(timeutils.utcnow() + FUTURE_DELTA),
- "issued_at": "2013-05-21T00:02:43.941473Z",
- }
- }
-
-
-SAMPLE_V3_TOKEN_EXPIRED = {
- "token": {
- "expires_at": utils.isotime(CURRENT_DATE),
- "issued_at": "2013-05-21T00:02:43.941473Z",
- }
-}
-
-SAMPLE_MALFORMED_TOKEN = {
- "token": {
- "bogus": {
- "no expiration data": None
- }
- }
-}
-
-
-class TestTokenProvider(unit.TestCase):
- def setUp(self):
- super(TestTokenProvider, self).setUp()
- self.useFixture(database.Database())
- self.load_backends()
-
- def test_get_token_version(self):
- self.assertEqual(
- token.provider.V2,
- self.token_provider_api.get_token_version(SAMPLE_V2_TOKEN))
- self.assertEqual(
- token.provider.V2,
- self.token_provider_api.get_token_version(
- SAMPLE_V2_TOKEN_WITH_EMBEDED_VERSION))
- self.assertEqual(
- token.provider.V3,
- self.token_provider_api.get_token_version(SAMPLE_V3_TOKEN))
- self.assertEqual(
- token.provider.V3,
- self.token_provider_api.get_token_version(
- SAMPLE_V3_TOKEN_WITH_EMBEDED_VERSION))
- self.assertRaises(exception.UnsupportedTokenVersionException,
- self.token_provider_api.get_token_version,
- 'bogus')
-
- def test_supported_token_providers(self):
- # test default config
-
- dependency.reset()
- self.assertIsInstance(token.provider.Manager().driver,
- uuid.Provider)
-
- dependency.reset()
- self.config_fixture.config(group='token', provider='uuid')
- self.assertIsInstance(token.provider.Manager().driver, uuid.Provider)
-
- dependency.reset()
- self.config_fixture.config(group='token', provider='pki')
- self.assertIsInstance(token.provider.Manager().driver, pki.Provider)
-
- dependency.reset()
- self.config_fixture.config(group='token', provider='pkiz')
- self.assertIsInstance(token.provider.Manager().driver, pkiz.Provider)
-
- dependency.reset()
- self.config_fixture.config(group='token', provider='fernet')
- self.assertIsInstance(token.provider.Manager().driver, fernet.Provider)
-
- def test_unsupported_token_provider(self):
- self.config_fixture.config(group='token',
- provider='my.package.MyProvider')
- self.assertRaises(ImportError,
- token.provider.Manager)
-
- def test_provider_token_expiration_validation(self):
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._is_valid_token,
- SAMPLE_V2_TOKEN_EXPIRED)
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._is_valid_token,
- SAMPLE_V3_TOKEN_EXPIRED)
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._is_valid_token,
- SAMPLE_MALFORMED_TOKEN)
- self.assertIsNone(
- self.token_provider_api._is_valid_token(create_v2_token()))
- self.assertIsNone(
- self.token_provider_api._is_valid_token(create_v3_token()))
-
- def test_no_token_raises_token_not_found(self):
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api.validate_token,
- None)
-
-
-# NOTE(ayoung): renamed to avoid automatic test detection
-class PKIProviderTests(object):
-
- def setUp(self):
- super(PKIProviderTests, self).setUp()
-
- from keystoneclient.common import cms
- self.cms = cms
-
- from keystone.common import environment
- self.environment = environment
-
- old_cms_subprocess = cms.subprocess
- self.addCleanup(setattr, cms, 'subprocess', old_cms_subprocess)
-
- old_env_subprocess = environment.subprocess
- self.addCleanup(setattr, environment, 'subprocess', old_env_subprocess)
-
- self.cms.subprocess = self.target_subprocess
- self.environment.subprocess = self.target_subprocess
-
- # force module reload so the imports get re-evaluated
- reload_module(pki)
-
- def test_get_token_id_error_handling(self):
- # cause command-line failure
- self.config_fixture.config(group='signing',
- keyfile='--please-break-me')
-
- provider = pki.Provider()
- token_data = {}
- self.assertRaises(exception.UnexpectedError,
- provider._get_token_id,
- token_data)
-
-
-class TestPKIProviderWithEventlet(PKIProviderTests, unit.TestCase):
-
- def setUp(self):
- # force keystoneclient.common.cms to use eventlet's subprocess
- from eventlet.green import subprocess
- self.target_subprocess = subprocess
-
- super(TestPKIProviderWithEventlet, self).setUp()
-
-
-class TestPKIProviderWithStdlib(PKIProviderTests, unit.TestCase):
-
- def setUp(self):
- # force keystoneclient.common.cms to use the stdlib subprocess
- import subprocess
- self.target_subprocess = subprocess
-
- super(TestPKIProviderWithStdlib, self).setUp()
diff --git a/keystone-moon/keystone/tests/unit/test_url_middleware.py b/keystone-moon/keystone/tests/unit/test_url_middleware.py
deleted file mode 100644
index 3b160b93..00000000
--- a/keystone-moon/keystone/tests/unit/test_url_middleware.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import webob
-
-from keystone import middleware
-from keystone.tests import unit
-
-
-class FakeApp(object):
- """Fakes a WSGI app URL normalized."""
-
- def __call__(self, env, start_response):
- resp = webob.Response()
- resp.body = 'SUCCESS'
- return resp(env, start_response)
-
-
-class UrlMiddlewareTest(unit.TestCase):
- def setUp(self):
- self.middleware = middleware.NormalizingFilter(FakeApp())
- self.response_status = None
- self.response_headers = None
- super(UrlMiddlewareTest, self).setUp()
-
- def start_fake_response(self, status, headers):
- self.response_status = int(status.split(' ', 1)[0])
- self.response_headers = dict(headers)
-
- def test_trailing_slash_normalization(self):
- """Tests /v2.0/tokens and /v2.0/tokens/ normalized URLs match."""
- req1 = webob.Request.blank('/v2.0/tokens')
- req2 = webob.Request.blank('/v2.0/tokens/')
- self.middleware(req1.environ, self.start_fake_response)
- self.middleware(req2.environ, self.start_fake_response)
- self.assertEqual(req1.path_url, req2.path_url)
- self.assertEqual('http://localhost/v2.0/tokens', req1.path_url)
-
- def test_rewrite_empty_path(self):
- """Tests empty path is rewritten to root."""
- req = webob.Request.blank('')
- self.middleware(req.environ, self.start_fake_response)
- self.assertEqual('http://localhost/', req.path_url)
diff --git a/keystone-moon/keystone/tests/unit/test_v2.py b/keystone-moon/keystone/tests/unit/test_v2.py
deleted file mode 100644
index e81c6040..00000000
--- a/keystone-moon/keystone/tests/unit/test_v2.py
+++ /dev/null
@@ -1,1590 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import json
-import time
-import uuid
-
-from keystoneclient.common import cms
-from oslo_config import cfg
-import six
-from six.moves import http_client
-from testtools import matchers
-
-from keystone.common import extension as keystone_extension
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit import ksfixtures
-from keystone.tests.unit import rest
-from keystone.tests.unit.schema import v2
-
-CONF = cfg.CONF
-
-
-class CoreApiTests(object):
- def assertValidError(self, error):
- self.assertIsNotNone(error.get('code'))
- self.assertIsNotNone(error.get('title'))
- self.assertIsNotNone(error.get('message'))
-
- def assertValidVersion(self, version):
- self.assertIsNotNone(version)
- self.assertIsNotNone(version.get('id'))
- self.assertIsNotNone(version.get('status'))
- self.assertIsNotNone(version.get('updated'))
-
- def assertValidExtension(self, extension):
- self.assertIsNotNone(extension)
- self.assertIsNotNone(extension.get('name'))
- self.assertIsNotNone(extension.get('namespace'))
- self.assertIsNotNone(extension.get('alias'))
- self.assertIsNotNone(extension.get('updated'))
-
- def assertValidExtensionLink(self, link):
- self.assertIsNotNone(link.get('rel'))
- self.assertIsNotNone(link.get('type'))
- self.assertIsNotNone(link.get('href'))
-
- def assertValidTenant(self, tenant):
- self.assertIsNotNone(tenant.get('id'))
- self.assertIsNotNone(tenant.get('name'))
- self.assertNotIn('domain_id', tenant)
- self.assertNotIn('parent_id', tenant)
-
- def assertValidUser(self, user):
- self.assertIsNotNone(user.get('id'))
- self.assertIsNotNone(user.get('name'))
-
- def assertValidRole(self, tenant):
- self.assertIsNotNone(tenant.get('id'))
- self.assertIsNotNone(tenant.get('name'))
-
- def test_public_not_found(self):
- r = self.public_request(
- path='/%s' % uuid.uuid4().hex,
- expected_status=http_client.NOT_FOUND)
- self.assertValidErrorResponse(r)
-
- def test_admin_not_found(self):
- r = self.admin_request(
- path='/%s' % uuid.uuid4().hex,
- expected_status=http_client.NOT_FOUND)
- self.assertValidErrorResponse(r)
-
- def test_public_multiple_choice(self):
- r = self.public_request(path='/', expected_status=300)
- self.assertValidMultipleChoiceResponse(r)
-
- def test_admin_multiple_choice(self):
- r = self.admin_request(path='/', expected_status=300)
- self.assertValidMultipleChoiceResponse(r)
-
- def test_public_version(self):
- r = self.public_request(path='/v2.0/')
- self.assertValidVersionResponse(r)
-
- def test_admin_version(self):
- r = self.admin_request(path='/v2.0/')
- self.assertValidVersionResponse(r)
-
- def test_public_extensions(self):
- r = self.public_request(path='/v2.0/extensions')
- self.assertValidExtensionListResponse(
- r, keystone_extension.PUBLIC_EXTENSIONS)
-
- def test_admin_extensions(self):
- r = self.admin_request(path='/v2.0/extensions')
- self.assertValidExtensionListResponse(
- r, keystone_extension.ADMIN_EXTENSIONS)
-
- def test_admin_extensions_returns_not_found(self):
- self.admin_request(path='/v2.0/extensions/invalid-extension',
- expected_status=http_client.NOT_FOUND)
-
- def test_public_osksadm_extension_returns_not_found(self):
- self.public_request(path='/v2.0/extensions/OS-KSADM',
- expected_status=http_client.NOT_FOUND)
-
- def test_admin_osksadm_extension(self):
- r = self.admin_request(path='/v2.0/extensions/OS-KSADM')
- self.assertValidExtensionResponse(
- r, keystone_extension.ADMIN_EXTENSIONS)
-
- def test_authenticate(self):
- r = self.public_request(
- method='POST',
- path='/v2.0/tokens',
- body={
- 'auth': {
- 'passwordCredentials': {
- 'username': self.user_foo['name'],
- 'password': self.user_foo['password'],
- },
- 'tenantId': self.tenant_bar['id'],
- },
- },
- expected_status=http_client.OK)
- self.assertValidAuthenticationResponse(r, require_service_catalog=True)
-
- def test_authenticate_unscoped(self):
- r = self.public_request(
- method='POST',
- path='/v2.0/tokens',
- body={
- 'auth': {
- 'passwordCredentials': {
- 'username': self.user_foo['name'],
- 'password': self.user_foo['password'],
- },
- },
- },
- expected_status=http_client.OK)
- self.assertValidAuthenticationResponse(r)
-
- def test_get_tenants_for_token(self):
- r = self.public_request(path='/v2.0/tenants',
- token=self.get_scoped_token())
- self.assertValidTenantListResponse(r)
-
- def test_validate_token(self):
- token = self.get_scoped_token()
- r = self.admin_request(
- path='/v2.0/tokens/%(token_id)s' % {
- 'token_id': token,
- },
- token=token)
- self.assertValidAuthenticationResponse(r)
-
- def test_invalid_token_returns_not_found(self):
- token = self.get_scoped_token()
- self.admin_request(
- path='/v2.0/tokens/%(token_id)s' % {
- 'token_id': 'invalid',
- },
- token=token,
- expected_status=http_client.NOT_FOUND)
-
- def test_validate_token_service_role(self):
- self.md_foobar = self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'],
- self.tenant_service['id'],
- self.role_service['id'])
-
- token = self.get_scoped_token(
- tenant_id=default_fixtures.SERVICE_TENANT_ID)
- r = self.admin_request(
- path='/v2.0/tokens/%s' % token,
- token=token)
- self.assertValidAuthenticationResponse(r)
-
- def test_remove_role_revokes_token(self):
- self.md_foobar = self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'],
- self.tenant_service['id'],
- self.role_service['id'])
-
- token = self.get_scoped_token(
- tenant_id=default_fixtures.SERVICE_TENANT_ID)
- r = self.admin_request(
- path='/v2.0/tokens/%s' % token,
- token=token)
- self.assertValidAuthenticationResponse(r)
-
- self.assignment_api.remove_role_from_user_and_project(
- self.user_foo['id'],
- self.tenant_service['id'],
- self.role_service['id'])
-
- r = self.admin_request(
- path='/v2.0/tokens/%s' % token,
- token=token,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_validate_token_belongs_to(self):
- token = self.get_scoped_token()
- path = ('/v2.0/tokens/%s?belongsTo=%s' % (token,
- self.tenant_bar['id']))
- r = self.admin_request(path=path, token=token)
- self.assertValidAuthenticationResponse(r, require_service_catalog=True)
-
- def test_validate_token_no_belongs_to_still_returns_catalog(self):
- token = self.get_scoped_token()
- path = ('/v2.0/tokens/%s' % token)
- r = self.admin_request(path=path, token=token)
- self.assertValidAuthenticationResponse(r, require_service_catalog=True)
-
- def test_validate_token_head(self):
- """The same call as above, except using HEAD.
-
- There's no response to validate here, but this is included for the
- sake of completely covering the core API.
-
- """
- token = self.get_scoped_token()
- self.admin_request(
- method='HEAD',
- path='/v2.0/tokens/%(token_id)s' % {
- 'token_id': token,
- },
- token=token,
- expected_status=http_client.OK)
-
- def test_endpoints(self):
- token = self.get_scoped_token()
- r = self.admin_request(
- path='/v2.0/tokens/%(token_id)s/endpoints' % {
- 'token_id': token,
- },
- token=token)
- self.assertValidEndpointListResponse(r)
-
- def test_get_tenant(self):
- token = self.get_scoped_token()
- r = self.admin_request(
- path='/v2.0/tenants/%(tenant_id)s' % {
- 'tenant_id': self.tenant_bar['id'],
- },
- token=token)
- self.assertValidTenantResponse(r)
-
- def test_get_tenant_by_name(self):
- token = self.get_scoped_token()
- r = self.admin_request(
- path='/v2.0/tenants?name=%(tenant_name)s' % {
- 'tenant_name': self.tenant_bar['name'],
- },
- token=token)
- self.assertValidTenantResponse(r)
-
- def test_get_user_roles_with_tenant(self):
- token = self.get_scoped_token()
- r = self.admin_request(
- path='/v2.0/tenants/%(tenant_id)s/users/%(user_id)s/roles' % {
- 'tenant_id': self.tenant_bar['id'],
- 'user_id': self.user_foo['id'],
- },
- token=token)
- self.assertValidRoleListResponse(r)
-
- def test_get_user_roles_without_tenant(self):
- token = self.get_scoped_token()
- self.admin_request(
- path='/v2.0/users/%(user_id)s/roles' % {
- 'user_id': self.user_foo['id'],
- },
- token=token, expected_status=http_client.NOT_IMPLEMENTED)
-
- def test_get_user(self):
- token = self.get_scoped_token()
- r = self.admin_request(
- path='/v2.0/users/%(user_id)s' % {
- 'user_id': self.user_foo['id'],
- },
- token=token)
- self.assertValidUserResponse(r)
-
- def test_get_user_by_name(self):
- token = self.get_scoped_token()
- r = self.admin_request(
- path='/v2.0/users?name=%(user_name)s' % {
- 'user_name': self.user_foo['name'],
- },
- token=token)
- self.assertValidUserResponse(r)
-
- def test_create_update_user_invalid_enabled_type(self):
- # Enforce usage of boolean for 'enabled' field
- token = self.get_scoped_token()
-
- # Test CREATE request
- r = self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex,
- 'enabled': "False",
- },
- },
- token=token,
- expected_status=http_client.BAD_REQUEST)
- self.assertValidErrorResponse(r)
-
- r = self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex,
- # In JSON, 0|1 are not booleans
- 'enabled': 0,
- },
- },
- token=token,
- expected_status=http_client.BAD_REQUEST)
- self.assertValidErrorResponse(r)
-
- # Test UPDATE request
- path = '/v2.0/users/%(user_id)s' % {
- 'user_id': self.user_foo['id'],
- }
-
- r = self.admin_request(
- method='PUT',
- path=path,
- body={
- 'user': {
- 'enabled': "False",
- },
- },
- token=token,
- expected_status=http_client.BAD_REQUEST)
- self.assertValidErrorResponse(r)
-
- r = self.admin_request(
- method='PUT',
- path=path,
- body={
- 'user': {
- # In JSON, 0|1 are not booleans
- 'enabled': 1,
- },
- },
- token=token,
- expected_status=http_client.BAD_REQUEST)
- self.assertValidErrorResponse(r)
-
- def test_create_update_user_valid_enabled_type(self):
- # Enforce usage of boolean for 'enabled' field
- token = self.get_scoped_token()
-
- # Test CREATE request
- self.admin_request(method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex,
- 'enabled': False,
- },
- },
- token=token,
- expected_status=http_client.OK)
-
- def test_error_response(self):
- """This triggers assertValidErrorResponse by convention."""
- self.public_request(path='/v2.0/tenants',
- expected_status=http_client.UNAUTHORIZED)
-
- def test_invalid_parameter_error_response(self):
- token = self.get_scoped_token()
- bad_body = {
- 'OS-KSADM:service%s' % uuid.uuid4().hex: {
- 'name': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- },
- }
- res = self.admin_request(method='POST',
- path='/v2.0/OS-KSADM/services',
- body=bad_body,
- token=token,
- expected_status=http_client.BAD_REQUEST)
- self.assertValidErrorResponse(res)
- res = self.admin_request(method='POST',
- path='/v2.0/users',
- body=bad_body,
- token=token,
- expected_status=http_client.BAD_REQUEST)
- self.assertValidErrorResponse(res)
-
- def _get_user_id(self, r):
- """Helper method to return user ID from a response.
-
- This needs to be overridden by child classes
- based on their content type.
-
- """
- raise NotImplementedError()
-
- def _get_role_id(self, r):
- """Helper method to return a role ID from a response.
-
- This needs to be overridden by child classes
- based on their content type.
-
- """
- raise NotImplementedError()
-
- def _get_role_name(self, r):
- """Helper method to return role NAME from a response.
-
- This needs to be overridden by child classes
- based on their content type.
-
- """
- raise NotImplementedError()
-
- def _get_project_id(self, r):
- """Helper method to return project ID from a response.
-
- This needs to be overridden by child classes
- based on their content type.
-
- """
- raise NotImplementedError()
-
- def assertNoRoles(self, r):
- """Helper method to assert No Roles
-
- This needs to be overridden by child classes
- based on their content type.
-
- """
- raise NotImplementedError()
-
- def test_update_user_tenant(self):
- token = self.get_scoped_token()
-
- # Create a new user
- r = self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex,
- 'tenantId': self.tenant_bar['id'],
- 'enabled': True,
- },
- },
- token=token,
- expected_status=http_client.OK)
-
- user_id = self._get_user_id(r.result)
-
- # Check if member_role is in tenant_bar
- r = self.admin_request(
- path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
- 'project_id': self.tenant_bar['id'],
- 'user_id': user_id
- },
- token=token,
- expected_status=http_client.OK)
- self.assertEqual(CONF.member_role_name, self._get_role_name(r.result))
-
- # Create a new tenant
- r = self.admin_request(
- method='POST',
- path='/v2.0/tenants',
- body={
- 'tenant': {
- 'name': 'test_update_user',
- 'description': 'A description ...',
- 'enabled': True,
- },
- },
- token=token,
- expected_status=http_client.OK)
-
- project_id = self._get_project_id(r.result)
-
- # Update user's tenant
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%(user_id)s' % {
- 'user_id': user_id,
- },
- body={
- 'user': {
- 'tenantId': project_id,
- },
- },
- token=token,
- expected_status=http_client.OK)
-
- # 'member_role' should be in new_tenant
- r = self.admin_request(
- path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
- 'project_id': project_id,
- 'user_id': user_id
- },
- token=token,
- expected_status=http_client.OK)
- self.assertEqual('_member_', self._get_role_name(r.result))
-
- # 'member_role' should not be in tenant_bar any more
- r = self.admin_request(
- path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
- 'project_id': self.tenant_bar['id'],
- 'user_id': user_id
- },
- token=token,
- expected_status=http_client.OK)
- self.assertNoRoles(r.result)
-
- def test_update_user_with_invalid_tenant(self):
- token = self.get_scoped_token()
-
- # Create a new user
- r = self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': 'test_invalid_tenant',
- 'password': uuid.uuid4().hex,
- 'tenantId': self.tenant_bar['id'],
- 'enabled': True,
- },
- },
- token=token,
- expected_status=http_client.OK)
- user_id = self._get_user_id(r.result)
-
- # Update user with an invalid tenant
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%(user_id)s' % {
- 'user_id': user_id,
- },
- body={
- 'user': {
- 'tenantId': 'abcde12345heha',
- },
- },
- token=token,
- expected_status=http_client.NOT_FOUND)
-
- def test_update_user_with_invalid_tenant_no_prev_tenant(self):
- token = self.get_scoped_token()
-
- # Create a new user
- r = self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': 'test_invalid_tenant',
- 'password': uuid.uuid4().hex,
- 'enabled': True,
- },
- },
- token=token,
- expected_status=http_client.OK)
- user_id = self._get_user_id(r.result)
-
- # Update user with an invalid tenant
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%(user_id)s' % {
- 'user_id': user_id,
- },
- body={
- 'user': {
- 'tenantId': 'abcde12345heha',
- },
- },
- token=token,
- expected_status=http_client.NOT_FOUND)
-
- def test_update_user_with_old_tenant(self):
- token = self.get_scoped_token()
-
- # Create a new user
- r = self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex,
- 'tenantId': self.tenant_bar['id'],
- 'enabled': True,
- },
- },
- token=token,
- expected_status=http_client.OK)
-
- user_id = self._get_user_id(r.result)
-
- # Check if member_role is in tenant_bar
- r = self.admin_request(
- path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
- 'project_id': self.tenant_bar['id'],
- 'user_id': user_id
- },
- token=token,
- expected_status=http_client.OK)
- self.assertEqual(CONF.member_role_name, self._get_role_name(r.result))
-
- # Update user's tenant with old tenant id
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%(user_id)s' % {
- 'user_id': user_id,
- },
- body={
- 'user': {
- 'tenantId': self.tenant_bar['id'],
- },
- },
- token=token,
- expected_status=http_client.OK)
-
- # 'member_role' should still be in tenant_bar
- r = self.admin_request(
- path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % {
- 'project_id': self.tenant_bar['id'],
- 'user_id': user_id
- },
- token=token,
- expected_status=http_client.OK)
- self.assertEqual('_member_', self._get_role_name(r.result))
-
- def test_authenticating_a_user_with_no_password(self):
- token = self.get_scoped_token()
-
- username = uuid.uuid4().hex
-
- # create the user
- self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': username,
- 'enabled': True,
- },
- },
- token=token)
-
- # fail to authenticate
- r = self.public_request(
- method='POST',
- path='/v2.0/tokens',
- body={
- 'auth': {
- 'passwordCredentials': {
- 'username': username,
- 'password': 'password',
- },
- },
- },
- expected_status=http_client.UNAUTHORIZED)
- self.assertValidErrorResponse(r)
-
- def test_www_authenticate_header(self):
- r = self.public_request(
- path='/v2.0/tenants',
- expected_status=http_client.UNAUTHORIZED)
- self.assertEqual('Keystone uri="http://localhost"',
- r.headers.get('WWW-Authenticate'))
-
- def test_www_authenticate_header_host(self):
- test_url = 'http://%s:4187' % uuid.uuid4().hex
- self.config_fixture.config(public_endpoint=test_url)
- r = self.public_request(
- path='/v2.0/tenants',
- expected_status=http_client.UNAUTHORIZED)
- self.assertEqual('Keystone uri="%s"' % test_url,
- r.headers.get('WWW-Authenticate'))
-
-
-class LegacyV2UsernameTests(object):
- """Tests to show the broken username behavior in V2.
-
- The V2 API is documented to use `username` instead of `name`. The
- API forced used to use name and left the username to fall into the
- `extra` field.
-
- These tests ensure this behavior works so fixes to `username`/`name`
- will be backward compatible.
- """
-
- def create_user(self, **user_attrs):
- """Creates a users and returns the response object.
-
- :param user_attrs: attributes added to the request body (optional)
- """
- token = self.get_scoped_token()
- body = {
- 'user': {
- 'name': uuid.uuid4().hex,
- 'enabled': True,
- },
- }
- body['user'].update(user_attrs)
-
- return self.admin_request(
- method='POST',
- path='/v2.0/users',
- token=token,
- body=body,
- expected_status=http_client.OK)
-
- def test_create_with_extra_username(self):
- """The response for creating a user will contain the extra fields."""
- fake_username = uuid.uuid4().hex
- r = self.create_user(username=fake_username)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual(fake_username, user.get('username'))
-
- def test_get_returns_username_from_extra(self):
- """The response for getting a user will contain the extra fields."""
- token = self.get_scoped_token()
-
- fake_username = uuid.uuid4().hex
- r = self.create_user(username=fake_username)
-
- id_ = self.get_user_attribute_from_response(r, 'id')
- r = self.admin_request(path='/v2.0/users/%s' % id_, token=token)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual(fake_username, user.get('username'))
-
- def test_update_returns_new_username_when_adding_username(self):
- """The response for updating a user will contain the extra fields.
-
- This is specifically testing for updating a username when a value
- was not previously set.
- """
- token = self.get_scoped_token()
-
- r = self.create_user()
-
- id_ = self.get_user_attribute_from_response(r, 'id')
- name = self.get_user_attribute_from_response(r, 'name')
- enabled = self.get_user_attribute_from_response(r, 'enabled')
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%s' % id_,
- token=token,
- body={
- 'user': {
- 'name': name,
- 'username': 'new_username',
- 'enabled': enabled,
- },
- },
- expected_status=http_client.OK)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual('new_username', user.get('username'))
-
- def test_update_returns_new_username_when_updating_username(self):
- """The response for updating a user will contain the extra fields.
-
- This tests updating a username that was previously set.
- """
- token = self.get_scoped_token()
-
- r = self.create_user(username='original_username')
-
- id_ = self.get_user_attribute_from_response(r, 'id')
- name = self.get_user_attribute_from_response(r, 'name')
- enabled = self.get_user_attribute_from_response(r, 'enabled')
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%s' % id_,
- token=token,
- body={
- 'user': {
- 'name': name,
- 'username': 'new_username',
- 'enabled': enabled,
- },
- },
- expected_status=http_client.OK)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual('new_username', user.get('username'))
-
- def test_username_is_always_returned_create(self):
- """Username is set as the value of name if no username is provided.
-
- This matches the v2.0 spec where we really should be using username
- and not name.
- """
- r = self.create_user()
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual(user.get('name'), user.get('username'))
-
- def test_username_is_always_returned_get(self):
- """Username is set as the value of name if no username is provided.
-
- This matches the v2.0 spec where we really should be using username
- and not name.
- """
- token = self.get_scoped_token()
-
- r = self.create_user()
-
- id_ = self.get_user_attribute_from_response(r, 'id')
- r = self.admin_request(path='/v2.0/users/%s' % id_, token=token)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual(user.get('name'), user.get('username'))
-
- def test_username_is_always_returned_get_by_name(self):
- """Username is set as the value of name if no username is provided.
-
- This matches the v2.0 spec where we really should be using username
- and not name.
- """
- token = self.get_scoped_token()
-
- r = self.create_user()
-
- name = self.get_user_attribute_from_response(r, 'name')
- r = self.admin_request(path='/v2.0/users?name=%s' % name, token=token)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual(user.get('name'), user.get('username'))
-
- def test_username_is_always_returned_update_no_username_provided(self):
- """Username is set as the value of name if no username is provided.
-
- This matches the v2.0 spec where we really should be using username
- and not name.
- """
- token = self.get_scoped_token()
-
- r = self.create_user()
-
- id_ = self.get_user_attribute_from_response(r, 'id')
- name = self.get_user_attribute_from_response(r, 'name')
- enabled = self.get_user_attribute_from_response(r, 'enabled')
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%s' % id_,
- token=token,
- body={
- 'user': {
- 'name': name,
- 'enabled': enabled,
- },
- },
- expected_status=http_client.OK)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual(user.get('name'), user.get('username'))
-
- def test_updated_username_is_returned(self):
- """Username is set as the value of name if no username is provided.
-
- This matches the v2.0 spec where we really should be using username
- and not name.
- """
- token = self.get_scoped_token()
-
- r = self.create_user()
-
- id_ = self.get_user_attribute_from_response(r, 'id')
- name = self.get_user_attribute_from_response(r, 'name')
- enabled = self.get_user_attribute_from_response(r, 'enabled')
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%s' % id_,
- token=token,
- body={
- 'user': {
- 'name': name,
- 'enabled': enabled,
- },
- },
- expected_status=http_client.OK)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual(user.get('name'), user.get('username'))
-
- def test_username_can_be_used_instead_of_name_create(self):
- token = self.get_scoped_token()
-
- r = self.admin_request(
- method='POST',
- path='/v2.0/users',
- token=token,
- body={
- 'user': {
- 'username': uuid.uuid4().hex,
- 'enabled': True,
- },
- },
- expected_status=http_client.OK)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual(user.get('name'), user.get('username'))
-
- def test_username_can_be_used_instead_of_name_update(self):
- token = self.get_scoped_token()
-
- r = self.create_user()
-
- id_ = self.get_user_attribute_from_response(r, 'id')
- new_username = uuid.uuid4().hex
- enabled = self.get_user_attribute_from_response(r, 'enabled')
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%s' % id_,
- token=token,
- body={
- 'user': {
- 'username': new_username,
- 'enabled': enabled,
- },
- },
- expected_status=http_client.OK)
-
- self.assertValidUserResponse(r)
-
- user = self.get_user_from_response(r)
- self.assertEqual(new_username, user.get('name'))
- self.assertEqual(user.get('name'), user.get('username'))
-
-
-class RestfulTestCase(rest.RestfulTestCase):
-
- def setUp(self):
- super(RestfulTestCase, self).setUp()
-
- # TODO(termie): add an admin user to the fixtures and use that user
- # override the fixtures, for now
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'],
- self.tenant_bar['id'],
- self.role_admin['id'])
-
-
-class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
-
- def config_overrides(self):
- super(V2TestCase, self).config_overrides()
- self.config_fixture.config(
- group='catalog',
- driver='templated',
- template_file=unit.dirs.tests('default_catalog.templates'))
-
- def _get_user_id(self, r):
- return r['user']['id']
-
- def _get_role_name(self, r):
- return r['roles'][0]['name']
-
- def _get_role_id(self, r):
- return r['roles'][0]['id']
-
- def _get_project_id(self, r):
- return r['tenant']['id']
-
- def _get_token_id(self, r):
- return r.result['access']['token']['id']
-
- def assertNoRoles(self, r):
- self.assertEqual([], r['roles'])
-
- def assertValidErrorResponse(self, r):
- self.assertIsNotNone(r.result.get('error'))
- self.assertValidError(r.result['error'])
- self.assertEqual(r.result['error']['code'], r.status_code)
-
- def assertValidExtension(self, extension, expected):
- super(V2TestCase, self).assertValidExtension(extension)
- descriptions = [ext['description'] for ext in six.itervalues(expected)]
- description = extension.get('description')
- self.assertIsNotNone(description)
- self.assertIn(description, descriptions)
- self.assertIsNotNone(extension.get('links'))
- self.assertNotEmpty(extension.get('links'))
- for link in extension.get('links'):
- self.assertValidExtensionLink(link)
-
- def assertValidExtensionListResponse(self, r, expected):
- self.assertIsNotNone(r.result.get('extensions'))
- self.assertIsNotNone(r.result['extensions'].get('values'))
- self.assertNotEmpty(r.result['extensions'].get('values'))
- for extension in r.result['extensions']['values']:
- self.assertValidExtension(extension, expected)
-
- def assertValidExtensionResponse(self, r, expected):
- self.assertValidExtension(r.result.get('extension'), expected)
-
- def assertValidUser(self, user):
- super(V2TestCase, self).assertValidUser(user)
- self.assertNotIn('default_project_id', user)
- if 'tenantId' in user:
- # NOTE(morganfainberg): tenantId should never be "None", it gets
- # filtered out of the object if it is there. This is suspenders
- # and a belt check to avoid unintended regressions.
- self.assertIsNotNone(user.get('tenantId'))
-
- def assertValidAuthenticationResponse(self, r,
- require_service_catalog=False):
- self.assertIsNotNone(r.result.get('access'))
- self.assertIsNotNone(r.result['access'].get('token'))
- self.assertIsNotNone(r.result['access'].get('user'))
-
- # validate token
- self.assertIsNotNone(r.result['access']['token'].get('id'))
- self.assertIsNotNone(r.result['access']['token'].get('expires'))
- tenant = r.result['access']['token'].get('tenant')
- if tenant is not None:
- # validate tenant
- self.assertIsNotNone(tenant.get('id'))
- self.assertIsNotNone(tenant.get('name'))
-
- # validate user
- self.assertIsNotNone(r.result['access']['user'].get('id'))
- self.assertIsNotNone(r.result['access']['user'].get('name'))
-
- if require_service_catalog:
- # roles are only provided with a service catalog
- roles = r.result['access']['user'].get('roles')
- self.assertNotEmpty(roles)
- for role in roles:
- self.assertIsNotNone(role.get('name'))
-
- serviceCatalog = r.result['access'].get('serviceCatalog')
- # validate service catalog
- if require_service_catalog:
- self.assertIsNotNone(serviceCatalog)
- if serviceCatalog is not None:
- self.assertIsInstance(serviceCatalog, list)
- if require_service_catalog:
- self.assertNotEmpty(serviceCatalog)
- for service in r.result['access']['serviceCatalog']:
- # validate service
- self.assertIsNotNone(service.get('name'))
- self.assertIsNotNone(service.get('type'))
-
- # services contain at least one endpoint
- self.assertIsNotNone(service.get('endpoints'))
- self.assertNotEmpty(service['endpoints'])
- for endpoint in service['endpoints']:
- # validate service endpoint
- self.assertIsNotNone(endpoint.get('publicURL'))
-
- def assertValidTenantListResponse(self, r):
- self.assertIsNotNone(r.result.get('tenants'))
- self.assertNotEmpty(r.result['tenants'])
- for tenant in r.result['tenants']:
- self.assertValidTenant(tenant)
- self.assertIsNotNone(tenant.get('enabled'))
- self.assertIn(tenant.get('enabled'), [True, False])
-
- def assertValidUserResponse(self, r):
- self.assertIsNotNone(r.result.get('user'))
- self.assertValidUser(r.result['user'])
-
- def assertValidTenantResponse(self, r):
- self.assertIsNotNone(r.result.get('tenant'))
- self.assertValidTenant(r.result['tenant'])
-
- def assertValidRoleListResponse(self, r):
- self.assertIsNotNone(r.result.get('roles'))
- self.assertNotEmpty(r.result['roles'])
- for role in r.result['roles']:
- self.assertValidRole(role)
-
- def assertValidVersion(self, version):
- super(V2TestCase, self).assertValidVersion(version)
-
- self.assertIsNotNone(version.get('links'))
- self.assertNotEmpty(version.get('links'))
- for link in version.get('links'):
- self.assertIsNotNone(link.get('rel'))
- self.assertIsNotNone(link.get('href'))
-
- self.assertIsNotNone(version.get('media-types'))
- self.assertNotEmpty(version.get('media-types'))
- for media in version.get('media-types'):
- self.assertIsNotNone(media.get('base'))
- self.assertIsNotNone(media.get('type'))
-
- def assertValidMultipleChoiceResponse(self, r):
- self.assertIsNotNone(r.result.get('versions'))
- self.assertIsNotNone(r.result['versions'].get('values'))
- self.assertNotEmpty(r.result['versions']['values'])
- for version in r.result['versions']['values']:
- self.assertValidVersion(version)
-
- def assertValidVersionResponse(self, r):
- self.assertValidVersion(r.result.get('version'))
-
- def assertValidEndpointListResponse(self, r):
- self.assertIsNotNone(r.result.get('endpoints'))
- self.assertNotEmpty(r.result['endpoints'])
- for endpoint in r.result['endpoints']:
- self.assertIsNotNone(endpoint.get('id'))
- self.assertIsNotNone(endpoint.get('name'))
- self.assertIsNotNone(endpoint.get('type'))
- self.assertIsNotNone(endpoint.get('publicURL'))
- self.assertIsNotNone(endpoint.get('internalURL'))
- self.assertIsNotNone(endpoint.get('adminURL'))
-
- def get_user_from_response(self, r):
- return r.result.get('user')
-
- def get_user_attribute_from_response(self, r, attribute_name):
- return r.result['user'][attribute_name]
-
- def test_service_crud_requires_auth(self):
- """Service CRUD should return unauthorized without an X-Auth-Token."""
- # values here don't matter because it will be unauthorized before
- # they're checked (bug 1006822).
- service_path = '/v2.0/OS-KSADM/services/%s' % uuid.uuid4().hex
- service_body = {
- 'OS-KSADM:service': {
- 'name': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- },
- }
-
- r = self.admin_request(method='GET',
- path='/v2.0/OS-KSADM/services',
- expected_status=http_client.UNAUTHORIZED)
- self.assertValidErrorResponse(r)
-
- r = self.admin_request(method='POST',
- path='/v2.0/OS-KSADM/services',
- body=service_body,
- expected_status=http_client.UNAUTHORIZED)
- self.assertValidErrorResponse(r)
-
- r = self.admin_request(method='GET',
- path=service_path,
- expected_status=http_client.UNAUTHORIZED)
- self.assertValidErrorResponse(r)
-
- r = self.admin_request(method='DELETE',
- path=service_path,
- expected_status=http_client.UNAUTHORIZED)
- self.assertValidErrorResponse(r)
-
- def test_user_role_list_requires_auth(self):
- """User role list return unauthorized without an X-Auth-Token."""
- # values here don't matter because it will be unauthorized before
- # they're checked (bug 1006815).
- path = '/v2.0/tenants/%(tenant_id)s/users/%(user_id)s/roles' % {
- 'tenant_id': uuid.uuid4().hex,
- 'user_id': uuid.uuid4().hex,
- }
-
- r = self.admin_request(path=path,
- expected_status=http_client.UNAUTHORIZED)
- self.assertValidErrorResponse(r)
-
- def test_fetch_revocation_list_nonadmin_fails(self):
- self.admin_request(
- method='GET',
- path='/v2.0/tokens/revoked',
- expected_status=http_client.UNAUTHORIZED)
-
- def test_fetch_revocation_list_admin_200(self):
- token = self.get_scoped_token()
- r = self.admin_request(
- method='GET',
- path='/v2.0/tokens/revoked',
- token=token,
- expected_status=http_client.OK)
- self.assertValidRevocationListResponse(r)
-
- def assertValidRevocationListResponse(self, response):
- self.assertIsNotNone(response.result['signed'])
-
- def _fetch_parse_revocation_list(self):
-
- token1 = self.get_scoped_token()
-
- # TODO(morganfainberg): Because this is making a restful call to the
- # app a change to UTCNOW via mock.patch will not affect the returned
- # token. The only surefire way to ensure there is not a transient bug
- # based upon when the second token is issued is with a sleep. This
- # issue all stems from the limited resolution (no microseconds) on the
- # expiry time of tokens and the way revocation events utilizes token
- # expiry to revoke individual tokens. This is a stop-gap until all
- # associated issues with resolution on expiration and revocation events
- # are resolved.
- time.sleep(1)
-
- token2 = self.get_scoped_token()
-
- self.admin_request(method='DELETE',
- path='/v2.0/tokens/%s' % token2,
- token=token1)
-
- r = self.admin_request(
- method='GET',
- path='/v2.0/tokens/revoked',
- token=token1,
- expected_status=http_client.OK)
- signed_text = r.result['signed']
-
- data_json = cms.cms_verify(signed_text, CONF.signing.certfile,
- CONF.signing.ca_certs)
-
- data = json.loads(data_json)
-
- return (data, token2)
-
- def test_fetch_revocation_list_md5(self):
- """Hash for tokens in revocation list and server config should match.
-
- If the server is configured for md5, then the revocation list has
- tokens hashed with MD5.
- """
- # The default hash algorithm is md5.
- hash_algorithm = 'md5'
-
- (data, token) = self._fetch_parse_revocation_list()
- token_hash = cms.cms_hash_token(token, mode=hash_algorithm)
- self.assertThat(token_hash, matchers.Equals(data['revoked'][0]['id']))
-
- def test_fetch_revocation_list_sha256(self):
- """Hash for tokens in revocation list and server config should match.
-
- If the server is configured for sha256, then the revocation list has
- tokens hashed with SHA256.
- """
- hash_algorithm = 'sha256'
- self.config_fixture.config(group='token',
- hash_algorithm=hash_algorithm)
-
- (data, token) = self._fetch_parse_revocation_list()
- token_hash = cms.cms_hash_token(token, mode=hash_algorithm)
- self.assertThat(token_hash, matchers.Equals(data['revoked'][0]['id']))
-
- def test_create_update_user_invalid_enabled_type(self):
- # Enforce usage of boolean for 'enabled' field
- token = self.get_scoped_token()
-
- # Test CREATE request
- r = self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex,
- # In JSON, "true|false" are not boolean
- 'enabled': "true",
- },
- },
- token=token,
- expected_status=http_client.BAD_REQUEST)
- self.assertValidErrorResponse(r)
-
- # Test UPDATE request
- r = self.admin_request(
- method='PUT',
- path='/v2.0/users/%(user_id)s' % {
- 'user_id': self.user_foo['id'],
- },
- body={
- 'user': {
- # In JSON, "true|false" are not boolean
- 'enabled': "true",
- },
- },
- token=token,
- expected_status=http_client.BAD_REQUEST)
- self.assertValidErrorResponse(r)
-
- def test_authenticating_a_user_with_an_OSKSADM_password(self):
- token = self.get_scoped_token()
-
- username = uuid.uuid4().hex
- password = uuid.uuid4().hex
-
- # create the user
- r = self.admin_request(
- method='POST',
- path='/v2.0/users',
- body={
- 'user': {
- 'name': username,
- 'OS-KSADM:password': password,
- 'enabled': True,
- },
- },
- token=token)
-
- # successfully authenticate
- self.public_request(
- method='POST',
- path='/v2.0/tokens',
- body={
- 'auth': {
- 'passwordCredentials': {
- 'username': username,
- 'password': password,
- },
- },
- },
- expected_status=http_client.OK)
-
- # ensure password doesn't leak
- user_id = r.result['user']['id']
- r = self.admin_request(
- method='GET',
- path='/v2.0/users/%s' % user_id,
- token=token,
- expected_status=http_client.OK)
- self.assertNotIn('OS-KSADM:password', r.result['user'])
-
- def test_updating_a_user_with_an_OSKSADM_password(self):
- token = self.get_scoped_token()
-
- user_id = self.user_foo['id']
- password = uuid.uuid4().hex
-
- # update the user
- self.admin_request(
- method='PUT',
- path='/v2.0/users/%s/OS-KSADM/password' % user_id,
- body={
- 'user': {
- 'password': password,
- },
- },
- token=token,
- expected_status=http_client.OK)
-
- # successfully authenticate
- self.public_request(
- method='POST',
- path='/v2.0/tokens',
- body={
- 'auth': {
- 'passwordCredentials': {
- 'username': self.user_foo['name'],
- 'password': password,
- },
- },
- },
- expected_status=http_client.OK)
-
-
-class RevokeApiTestCase(V2TestCase):
- def config_overrides(self):
- super(RevokeApiTestCase, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='pki',
- revoke_by_id=False)
-
- def test_fetch_revocation_list_admin_200(self):
- self.skipTest('Revoke API disables revocation_list.')
-
- def test_fetch_revocation_list_md5(self):
- self.skipTest('Revoke API disables revocation_list.')
-
- def test_fetch_revocation_list_sha256(self):
- self.skipTest('Revoke API disables revocation_list.')
-
-
-class TestFernetTokenProviderV2(RestfulTestCase):
-
- def setUp(self):
- super(TestFernetTokenProviderV2, self).setUp()
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
- # Add catalog data
- self.region = unit.new_region_ref()
- self.region_id = self.region['id']
- self.catalog_api.create_region(self.region)
-
- self.service = unit.new_service_ref()
- self.service_id = self.service['id']
- self.catalog_api.create_service(self.service_id, self.service)
-
- self.endpoint = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id)
- self.endpoint_id = self.endpoint['id']
- self.catalog_api.create_endpoint(self.endpoint_id, self.endpoint)
-
- def assertValidUnscopedTokenResponse(self, r):
- v2.unscoped_validator.validate(r.json['access'])
-
- def assertValidScopedTokenResponse(self, r):
- v2.scoped_validator.validate(r.json['access'])
-
- # Used by RestfulTestCase
- def _get_token_id(self, r):
- return r.result['access']['token']['id']
-
- def new_project_ref(self):
- return {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'domain_id': 'default',
- 'enabled': True}
-
- def config_overrides(self):
- super(TestFernetTokenProviderV2, self).config_overrides()
- self.config_fixture.config(group='token', provider='fernet')
-
- def test_authenticate_unscoped_token(self):
- unscoped_token = self.get_unscoped_token()
- # Fernet token must be of length 255 per usability requirements
- self.assertLess(len(unscoped_token), 255)
-
- def test_validate_unscoped_token(self):
- # Grab an admin token to validate with
- project_ref = self.new_project_ref()
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.assignment_api.add_role_to_user_and_project(self.user_foo['id'],
- project_ref['id'],
- self.role_admin['id'])
- admin_token = self.get_scoped_token(tenant_id=project_ref['id'])
- unscoped_token = self.get_unscoped_token()
- path = ('/v2.0/tokens/%s' % unscoped_token)
- resp = self.admin_request(
- method='GET',
- path=path,
- token=admin_token,
- expected_status=http_client.OK)
- self.assertValidUnscopedTokenResponse(resp)
-
- def test_authenticate_scoped_token(self):
- project_ref = self.new_project_ref()
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], project_ref['id'], self.role_service['id'])
- token = self.get_scoped_token(tenant_id=project_ref['id'])
- # Fernet token must be of length 255 per usability requirements
- self.assertLess(len(token), 255)
-
- def test_validate_scoped_token(self):
- project_ref = self.new_project_ref()
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.assignment_api.add_role_to_user_and_project(self.user_foo['id'],
- project_ref['id'],
- self.role_admin['id'])
- project2_ref = self.new_project_ref()
- self.resource_api.create_project(project2_ref['id'], project2_ref)
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'], project2_ref['id'], self.role_member['id'])
- admin_token = self.get_scoped_token(tenant_id=project_ref['id'])
- member_token = self.get_scoped_token(tenant_id=project2_ref['id'])
- path = ('/v2.0/tokens/%s?belongsTo=%s' % (member_token,
- project2_ref['id']))
- # Validate token belongs to project
- resp = self.admin_request(
- method='GET',
- path=path,
- token=admin_token,
- expected_status=http_client.OK)
- self.assertValidScopedTokenResponse(resp)
-
- def test_token_authentication_and_validation(self):
- """Test token authentication for Fernet token provider.
-
- Verify that token authentication returns validate response code and
- valid token belongs to project.
- """
- project_ref = self.new_project_ref()
- self.resource_api.create_project(project_ref['id'], project_ref)
- unscoped_token = self.get_unscoped_token()
- self.assignment_api.add_role_to_user_and_project(self.user_foo['id'],
- project_ref['id'],
- self.role_admin['id'])
- r = self.public_request(
- method='POST',
- path='/v2.0/tokens',
- body={
- 'auth': {
- 'tenantName': project_ref['name'],
- 'token': {
- 'id': unscoped_token.encode('ascii')
- }
- }
- },
- expected_status=http_client.OK)
-
- token_id = self._get_token_id(r)
- path = ('/v2.0/tokens/%s?belongsTo=%s' % (token_id, project_ref['id']))
- # Validate token belongs to project
- resp = self.admin_request(
- method='GET',
- path=path,
- token=self.get_admin_token(),
- expected_status=http_client.OK)
- self.assertValidScopedTokenResponse(resp)
-
- def test_rescoped_tokens_maintain_original_expiration(self):
- project_ref = self.new_project_ref()
- self.resource_api.create_project(project_ref['id'], project_ref)
- self.assignment_api.add_role_to_user_and_project(self.user_foo['id'],
- project_ref['id'],
- self.role_admin['id'])
- resp = self.public_request(
- method='POST',
- path='/v2.0/tokens',
- body={
- 'auth': {
- 'tenantName': project_ref['name'],
- 'passwordCredentials': {
- 'username': self.user_foo['name'],
- 'password': self.user_foo['password']
- }
- }
- },
- # NOTE(lbragstad): This test may need to be refactored if Keystone
- # decides to disallow rescoping using a scoped token.
- expected_status=http_client.OK)
- original_token = resp.result['access']['token']['id']
- original_expiration = resp.result['access']['token']['expires']
-
- resp = self.public_request(
- method='POST',
- path='/v2.0/tokens',
- body={
- 'auth': {
- 'tenantName': project_ref['name'],
- 'token': {
- 'id': original_token,
- }
- }
- },
- expected_status=http_client.OK)
- rescoped_token = resp.result['access']['token']['id']
- rescoped_expiration = resp.result['access']['token']['expires']
- self.assertNotEqual(original_token, rescoped_token)
- self.assertEqual(original_expiration, rescoped_expiration)
- self.assertValidScopedTokenResponse(resp)
diff --git a/keystone-moon/keystone/tests/unit/test_v2_controller.py b/keystone-moon/keystone/tests/unit/test_v2_controller.py
deleted file mode 100644
index 6cf8bc53..00000000
--- a/keystone-moon/keystone/tests/unit/test_v2_controller.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Copyright 2014 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import copy
-import uuid
-
-from testtools import matchers
-
-from keystone.assignment import controllers as assignment_controllers
-from keystone import exception
-from keystone.resource import controllers as resource_controllers
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.ksfixtures import database
-
-
-_ADMIN_CONTEXT = {'is_admin': True, 'query_string': {}}
-
-
-class TenantTestCase(unit.TestCase):
- """Tests for the V2 Tenant controller.
-
- These tests exercise :class:`keystone.assignment.controllers.Tenant`.
-
- """
-
- def setUp(self):
- super(TenantTestCase, self).setUp()
- self.useFixture(database.Database())
- self.load_backends()
- self.load_fixtures(default_fixtures)
- self.tenant_controller = resource_controllers.Tenant()
- self.assignment_tenant_controller = (
- assignment_controllers.TenantAssignment())
- self.assignment_role_controller = (
- assignment_controllers.RoleAssignmentV2())
-
- def test_get_project_users_no_user(self):
- """get_project_users when user doesn't exist.
-
- When a user that's not known to `identity` has a role on a project,
- then `get_project_users` just skips that user.
-
- """
- project_id = self.tenant_bar['id']
-
- orig_project_users = (
- self.assignment_tenant_controller.get_project_users(_ADMIN_CONTEXT,
- project_id))
-
- # Assign a role to a user that doesn't exist to the `bar` project.
-
- user_id = uuid.uuid4().hex
- self.assignment_role_controller.add_role_to_user(
- _ADMIN_CONTEXT, user_id, self.role_other['id'], project_id)
-
- new_project_users = (
- self.assignment_tenant_controller.get_project_users(_ADMIN_CONTEXT,
- project_id))
-
- # The new user isn't included in the result, so no change.
- # asserting that the expected values appear in the list,
- # without asserting the order of the results
- self.assertEqual(sorted(orig_project_users), sorted(new_project_users))
-
- def test_list_projects_default_domain(self):
- """Test that list projects only returns those in the default domain."""
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- project1 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- # Check the real total number of projects, we should have the:
- # - tenants in the default fixtures
- # - the project representing the default domain
- # - the project representing the domain we created above
- # - the project we created above
- refs = self.resource_api.list_projects()
- self.assertThat(
- refs, matchers.HasLength(len(default_fixtures.TENANTS) + 3))
-
- # Now list all projects using the v2 API - we should only get
- # back those in the default features, since only those are in the
- # default domain.
- refs = self.tenant_controller.get_all_projects(_ADMIN_CONTEXT)
- self.assertEqual(len(default_fixtures.TENANTS), len(refs['tenants']))
- for tenant in default_fixtures.TENANTS:
- tenant_copy = tenant.copy()
- tenant_copy.pop('domain_id')
- tenant_copy.pop('parent_id')
- tenant_copy.pop('is_domain')
- self.assertIn(tenant_copy, refs['tenants'])
-
- def _create_is_domain_project(self):
- project = unit.new_project_ref(is_domain=True)
- project_ref = self.resource_api.create_project(project['id'], project)
- return self.tenant_controller.v3_to_v2_project(project_ref)
-
- def test_get_is_domain_project_not_found(self):
- """Test that get project does not return is_domain projects."""
- project = self._create_is_domain_project()
-
- context = copy.deepcopy(_ADMIN_CONTEXT)
- context['query_string']['name'] = project['name']
-
- self.assertRaises(
- exception.ProjectNotFound,
- self.tenant_controller.get_all_projects,
- context)
-
- context = copy.deepcopy(_ADMIN_CONTEXT)
- context['query_string']['name'] = project['id']
-
- self.assertRaises(
- exception.ProjectNotFound,
- self.tenant_controller.get_all_projects,
- context)
-
- def test_create_is_domain_project_fails(self):
- """Test that the creation of a project acting as a domain fails."""
- project = {'name': uuid.uuid4().hex, 'domain_id': 'default',
- 'is_domain': True}
-
- self.assertRaises(
- exception.ValidationError,
- self.tenant_controller.create_project,
- _ADMIN_CONTEXT,
- project)
-
- def test_create_project_passing_is_domain_false_fails(self):
- """Test that passing is_domain=False is not allowed."""
- project = {'name': uuid.uuid4().hex, 'domain_id': 'default',
- 'is_domain': False}
-
- self.assertRaises(
- exception.ValidationError,
- self.tenant_controller.create_project,
- _ADMIN_CONTEXT,
- project)
-
- def test_update_is_domain_project_not_found(self):
- """Test that update is_domain project is not allowed in v2."""
- project = self._create_is_domain_project()
-
- project['name'] = uuid.uuid4().hex
- self.assertRaises(
- exception.ProjectNotFound,
- self.tenant_controller.update_project,
- _ADMIN_CONTEXT,
- project['id'],
- project)
-
- def test_delete_is_domain_project_not_found(self):
- """Test that delete is_domain project is not allowed in v2."""
- project = self._create_is_domain_project()
-
- self.assertRaises(
- exception.ProjectNotFound,
- self.tenant_controller.delete_project,
- _ADMIN_CONTEXT,
- project['id'])
-
- def test_list_is_domain_project_not_found(self):
- """Test v2 get_all_projects having projects that act as a domain.
-
- In v2 no project with the is_domain flag enabled should be returned.
- """
- project1 = self._create_is_domain_project()
- project2 = self._create_is_domain_project()
-
- refs = self.tenant_controller.get_all_projects(_ADMIN_CONTEXT)
- projects = refs.get('tenants')
-
- self.assertNotIn(project1, projects)
- self.assertNotIn(project2, projects)
diff --git a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py
deleted file mode 100644
index 2a3fad86..00000000
--- a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py
+++ /dev/null
@@ -1,1376 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import uuid
-
-from keystoneclient.contrib.ec2 import utils as ec2_utils
-from keystoneclient import exceptions as client_exceptions
-from keystoneclient.v2_0 import client as ks_client
-import mock
-from oslo_config import cfg
-from oslo_serialization import jsonutils
-from oslo_utils import timeutils
-from six.moves import http_client
-from six.moves import range
-import webob
-
-from keystone.tests import unit
-from keystone.tests.unit import default_fixtures
-from keystone.tests.unit.ksfixtures import appserver
-from keystone.tests.unit.ksfixtures import database
-
-
-CONF = cfg.CONF
-DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
-
-
-class ClientDrivenTestCase(unit.TestCase):
-
- def config_files(self):
- config_files = super(ClientDrivenTestCase, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
- def setUp(self):
- super(ClientDrivenTestCase, self).setUp()
-
- # FIXME(morganfainberg): Since we are running tests through the
- # controllers and some internal api drivers are SQL-only, the correct
- # approach is to ensure we have the correct backing store. The
- # credential api makes some very SQL specific assumptions that should
- # be addressed allowing for non-SQL based testing to occur.
- self.useFixture(database.Database())
- self.load_backends()
-
- self.load_fixtures(default_fixtures)
-
- # TODO(termie): add an admin user to the fixtures and use that user
- # override the fixtures, for now
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'],
- self.tenant_bar['id'],
- self.role_admin['id'])
-
- conf = self._paste_config('keystone')
- fixture = self.useFixture(appserver.AppServer(conf, appserver.MAIN))
- self.public_server = fixture.server
- fixture = self.useFixture(appserver.AppServer(conf, appserver.ADMIN))
- self.admin_server = fixture.server
-
- self.default_client = self.get_client()
-
- self.addCleanup(self.cleanup_instance('public_server', 'admin_server',
- 'default_client'))
-
- def _public_url(self):
- public_port = self.public_server.socket_info['socket'][1]
- return "http://localhost:%s/v2.0" % public_port
-
- def _admin_url(self):
- admin_port = self.admin_server.socket_info['socket'][1]
- return "http://localhost:%s/v2.0" % admin_port
-
- def _client(self, admin=False, **kwargs):
- url = self._admin_url() if admin else self._public_url()
- kc = ks_client.Client(endpoint=url,
- auth_url=self._public_url(),
- **kwargs)
- kc.authenticate()
- # have to manually overwrite the management url after authentication
- kc.management_url = url
- return kc
-
- def get_client(self, user_ref=None, tenant_ref=None, admin=False):
- if user_ref is None:
- user_ref = self.user_foo
- if tenant_ref is None:
- for user in default_fixtures.USERS:
- # The fixture ID is no longer used as the ID in the database
- # The fixture ID, however, is still used as part of the
- # attribute name when storing the created object on the test
- # case. This means that we need to use the fixture ID below to
- # find the actial object so that we can get the ID as stored
- # in the database to compare against.
- if (getattr(self, 'user_%s' % user['id'])['id'] ==
- user_ref['id']):
- tenant_id = user['tenants'][0]
- else:
- tenant_id = tenant_ref['id']
-
- return self._client(username=user_ref['name'],
- password=user_ref['password'],
- tenant_id=tenant_id,
- admin=admin)
-
- def test_authenticate_tenant_name_and_tenants(self):
- client = self.get_client()
- tenants = client.tenants.list()
- self.assertEqual(self.tenant_bar['id'], tenants[0].id)
-
- def test_authenticate_tenant_id_and_tenants(self):
- client = self._client(username=self.user_foo['name'],
- password=self.user_foo['password'],
- tenant_id='bar')
- tenants = client.tenants.list()
- self.assertEqual(self.tenant_bar['id'], tenants[0].id)
-
- def test_authenticate_invalid_tenant_id(self):
- self.assertRaises(client_exceptions.Unauthorized,
- self._client,
- username=self.user_foo['name'],
- password=self.user_foo['password'],
- tenant_id='baz')
-
- def test_authenticate_token_no_tenant(self):
- client = self.get_client()
- token = client.auth_token
- token_client = self._client(token=token)
- tenants = token_client.tenants.list()
- self.assertEqual(self.tenant_bar['id'], tenants[0].id)
-
- def test_authenticate_token_tenant_id(self):
- client = self.get_client()
- token = client.auth_token
- token_client = self._client(token=token, tenant_id='bar')
- tenants = token_client.tenants.list()
- self.assertEqual(self.tenant_bar['id'], tenants[0].id)
-
- def test_authenticate_token_invalid_tenant_id(self):
- client = self.get_client()
- token = client.auth_token
- self.assertRaises(client_exceptions.Unauthorized,
- self._client, token=token,
- tenant_id=uuid.uuid4().hex)
-
- def test_authenticate_token_invalid_tenant_name(self):
- client = self.get_client()
- token = client.auth_token
- self.assertRaises(client_exceptions.Unauthorized,
- self._client, token=token,
- tenant_name=uuid.uuid4().hex)
-
- def test_authenticate_token_tenant_name(self):
- client = self.get_client()
- token = client.auth_token
- token_client = self._client(token=token, tenant_name='BAR')
- tenants = token_client.tenants.list()
- self.assertEqual(self.tenant_bar['id'], tenants[0].id)
- self.assertEqual(self.tenant_bar['id'], tenants[0].id)
-
- def test_authenticate_and_delete_token(self):
- client = self.get_client(admin=True)
- token = client.auth_token
- token_client = self._client(token=token)
- tenants = token_client.tenants.list()
- self.assertEqual(self.tenant_bar['id'], tenants[0].id)
-
- client.tokens.delete(token_client.auth_token)
-
- self.assertRaises(client_exceptions.Unauthorized,
- token_client.tenants.list)
-
- def test_authenticate_no_password(self):
- user_ref = self.user_foo.copy()
- user_ref['password'] = None
- self.assertRaises(client_exceptions.AuthorizationFailure,
- self.get_client,
- user_ref)
-
- def test_authenticate_no_username(self):
- user_ref = self.user_foo.copy()
- user_ref['name'] = None
- self.assertRaises(client_exceptions.AuthorizationFailure,
- self.get_client,
- user_ref)
-
- def test_authenticate_disabled_tenant(self):
- admin_client = self.get_client(admin=True)
-
- tenant = {
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': False,
- }
- tenant_ref = admin_client.tenants.create(
- tenant_name=tenant['name'],
- description=tenant['description'],
- enabled=tenant['enabled'])
- tenant['id'] = tenant_ref.id
-
- user = {
- 'name': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex,
- 'email': uuid.uuid4().hex,
- 'tenant_id': tenant['id'],
- }
- user_ref = admin_client.users.create(
- name=user['name'],
- password=user['password'],
- email=user['email'],
- tenant_id=user['tenant_id'])
- user['id'] = user_ref.id
-
- # password authentication
- self.assertRaises(
- client_exceptions.Unauthorized,
- self._client,
- username=user['name'],
- password=user['password'],
- tenant_id=tenant['id'])
-
- # token authentication
- client = self._client(
- username=user['name'],
- password=user['password'])
- self.assertRaises(
- client_exceptions.Unauthorized,
- self._client,
- token=client.auth_token,
- tenant_id=tenant['id'])
-
- # FIXME(ja): this test should require the "keystone:admin" roled
- # (probably the role set via --keystone_admin_role flag)
- # FIXME(ja): add a test that admin endpoint is only sent to admin user
- # FIXME(ja): add a test that admin endpoint returns unauthorized if not
- # admin
- def test_tenant_create_update_and_delete(self):
- tenant_name = 'original_tenant'
- tenant_description = 'My original tenant!'
- tenant_enabled = True
- client = self.get_client(admin=True)
-
- # create, get, and list a tenant
- tenant = client.tenants.create(tenant_name=tenant_name,
- description=tenant_description,
- enabled=tenant_enabled)
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertEqual(tenant_enabled, tenant.enabled)
-
- tenant = client.tenants.get(tenant_id=tenant.id)
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertEqual(tenant_enabled, tenant.enabled)
-
- tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertEqual(tenant_enabled, tenant.enabled)
-
- # update, get, and list a tenant
- tenant_name = 'updated_tenant'
- tenant_description = 'Updated tenant!'
- tenant_enabled = False
- tenant = client.tenants.update(tenant_id=tenant.id,
- tenant_name=tenant_name,
- enabled=tenant_enabled,
- description=tenant_description)
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertEqual(tenant_enabled, tenant.enabled)
-
- tenant = client.tenants.get(tenant_id=tenant.id)
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertEqual(tenant_enabled, tenant.enabled)
-
- tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertEqual(tenant_enabled, tenant.enabled)
-
- # delete, get, and list a tenant
- client.tenants.delete(tenant=tenant.id)
- self.assertRaises(client_exceptions.NotFound, client.tenants.get,
- tenant.id)
- self.assertFalse([t for t in client.tenants.list()
- if t.id == tenant.id])
-
- def test_tenant_create_update_and_delete_unicode(self):
- tenant_name = u'original \u540d\u5b57'
- tenant_description = 'My original tenant!'
- tenant_enabled = True
- client = self.get_client(admin=True)
-
- # create, get, and list a tenant
- tenant = client.tenants.create(tenant_name,
- description=tenant_description,
- enabled=tenant_enabled)
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertIs(tenant.enabled, tenant_enabled)
-
- tenant = client.tenants.get(tenant.id)
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertIs(tenant.enabled, tenant_enabled)
-
- # multiple tenants exist due to fixtures, so find the one we're testing
- tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertIs(tenant.enabled, tenant_enabled)
-
- # update, get, and list a tenant
- tenant_name = u'updated \u540d\u5b57'
- tenant_description = 'Updated tenant!'
- tenant_enabled = False
- tenant = client.tenants.update(tenant.id,
- tenant_name=tenant_name,
- enabled=tenant_enabled,
- description=tenant_description)
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertIs(tenant.enabled, tenant_enabled)
-
- tenant = client.tenants.get(tenant.id)
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertIs(tenant.enabled, tenant_enabled)
-
- tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop()
- self.assertEqual(tenant_name, tenant.name)
- self.assertEqual(tenant_description, tenant.description)
- self.assertIs(tenant.enabled, tenant_enabled)
-
- # delete, get, and list a tenant
- client.tenants.delete(tenant.id)
- self.assertRaises(client_exceptions.NotFound, client.tenants.get,
- tenant.id)
- self.assertFalse([t for t in client.tenants.list()
- if t.id == tenant.id])
-
- def test_tenant_create_no_name(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.BadRequest,
- client.tenants.create,
- tenant_name="")
-
- def test_tenant_delete_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.tenants.delete,
- tenant=uuid.uuid4().hex)
-
- def test_tenant_get_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.tenants.get,
- tenant_id=uuid.uuid4().hex)
-
- def test_tenant_update_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.tenants.update,
- tenant_id=uuid.uuid4().hex)
-
- def test_tenant_list(self):
- client = self.get_client()
- tenants = client.tenants.list()
- self.assertEqual(1, len(tenants))
-
- # Admin endpoint should return *all* tenants
- client = self.get_client(admin=True)
- tenants = client.tenants.list()
- self.assertEqual(len(default_fixtures.TENANTS), len(tenants))
-
- def test_invalid_password(self):
- good_client = self._client(username=self.user_foo['name'],
- password=self.user_foo['password'])
- good_client.tenants.list()
-
- self.assertRaises(client_exceptions.Unauthorized,
- self._client,
- username=self.user_foo['name'],
- password=uuid.uuid4().hex)
-
- def test_invalid_user_and_password(self):
- self.assertRaises(client_exceptions.Unauthorized,
- self._client,
- username=uuid.uuid4().hex,
- password=uuid.uuid4().hex)
-
- def test_change_password_invalidates_token(self):
- admin_client = self.get_client(admin=True)
-
- username = uuid.uuid4().hex
- password = uuid.uuid4().hex
- user = admin_client.users.create(name=username, password=password,
- email=uuid.uuid4().hex)
-
- # auth as user should work before a password change
- client = self._client(username=username, password=password)
-
- # auth as user with a token should work before a password change
- self._client(token=client.auth_token)
-
- # administrative password reset
- admin_client.users.update_password(
- user=user.id,
- password=uuid.uuid4().hex)
-
- # auth as user with original password should not work after change
- self.assertRaises(client_exceptions.Unauthorized,
- self._client,
- username=username,
- password=password)
-
- # authenticate with an old token should not work after change
- self.assertRaises(client_exceptions.Unauthorized,
- self._client,
- token=client.auth_token)
-
- def test_user_change_own_password_invalidates_token(self):
- # bootstrap a user as admin
- client = self.get_client(admin=True)
- username = uuid.uuid4().hex
- password = uuid.uuid4().hex
- client.users.create(name=username, password=password,
- email=uuid.uuid4().hex)
-
- # auth as user should work before a password change
- client = self._client(username=username, password=password)
-
- # auth as user with a token should work before a password change
- self._client(token=client.auth_token)
-
- # change the user's own password
- # TODO(dolphm): This should NOT raise an HTTPError at all, but rather
- # this should succeed with a 2xx. This 500 does not prevent the test
- # from demonstrating the desired consequences below, though.
- self.assertRaises(client_exceptions.HTTPError,
- client.users.update_own_password,
- password, uuid.uuid4().hex)
-
- # auth as user with original password should not work after change
- self.assertRaises(client_exceptions.Unauthorized,
- self._client,
- username=username,
- password=password)
-
- # auth as user with an old token should not work after change
- self.assertRaises(client_exceptions.Unauthorized,
- self._client,
- token=client.auth_token)
-
- def test_disable_tenant_invalidates_token(self):
- admin_client = self.get_client(admin=True)
- foo_client = self.get_client(self.user_foo)
- tenant_bar = admin_client.tenants.get(self.tenant_bar['id'])
-
- # Disable the tenant.
- tenant_bar.update(enabled=False)
-
- # Test that the token has been removed.
- self.assertRaises(client_exceptions.Unauthorized,
- foo_client.tokens.authenticate,
- token=foo_client.auth_token)
-
- # Test that the user access has been disabled.
- self.assertRaises(client_exceptions.Unauthorized,
- self.get_client,
- self.user_foo)
-
- def test_delete_tenant_invalidates_token(self):
- admin_client = self.get_client(admin=True)
- foo_client = self.get_client(self.user_foo)
- tenant_bar = admin_client.tenants.get(self.tenant_bar['id'])
-
- # Delete the tenant.
- tenant_bar.delete()
-
- # Test that the token has been removed.
- self.assertRaises(client_exceptions.Unauthorized,
- foo_client.tokens.authenticate,
- token=foo_client.auth_token)
-
- # Test that the user access has been disabled.
- self.assertRaises(client_exceptions.Unauthorized,
- self.get_client,
- self.user_foo)
-
- def test_disable_user_invalidates_token(self):
- admin_client = self.get_client(admin=True)
- foo_client = self.get_client(self.user_foo)
-
- admin_client.users.update_enabled(user=self.user_foo['id'],
- enabled=False)
-
- self.assertRaises(client_exceptions.Unauthorized,
- foo_client.tokens.authenticate,
- token=foo_client.auth_token)
-
- self.assertRaises(client_exceptions.Unauthorized,
- self.get_client,
- self.user_foo)
-
- def test_delete_user_invalidates_token(self):
- admin_client = self.get_client(admin=True)
- client = self.get_client(admin=False)
-
- username = uuid.uuid4().hex
- password = uuid.uuid4().hex
- user_id = admin_client.users.create(
- name=username, password=password, email=uuid.uuid4().hex).id
-
- token_id = client.tokens.authenticate(
- username=username, password=password).id
-
- # token should be usable before the user is deleted
- client.tokens.authenticate(token=token_id)
-
- admin_client.users.delete(user=user_id)
-
- # authenticate with a token should not work after the user is deleted
- self.assertRaises(client_exceptions.Unauthorized,
- client.tokens.authenticate,
- token=token_id)
-
- @mock.patch.object(timeutils, 'utcnow')
- def test_token_expiry_maintained(self, mock_utcnow):
- now = datetime.datetime.utcnow()
- mock_utcnow.return_value = now
- foo_client = self.get_client(self.user_foo)
-
- orig_token = foo_client.service_catalog.catalog['token']
- mock_utcnow.return_value = now + datetime.timedelta(seconds=1)
- reauthenticated_token = foo_client.tokens.authenticate(
- token=foo_client.auth_token)
-
- self.assertCloseEnoughForGovernmentWork(
- timeutils.parse_isotime(orig_token['expires']),
- timeutils.parse_isotime(reauthenticated_token.expires))
-
- def test_user_create_update_delete(self):
- test_username = 'new_user'
- client = self.get_client(admin=True)
- user = client.users.create(name=test_username,
- password='password',
- email='user1@test.com')
- self.assertEqual(test_username, user.name)
-
- user = client.users.get(user=user.id)
- self.assertEqual(test_username, user.name)
-
- user = client.users.update(user=user,
- name=test_username,
- email='user2@test.com')
- self.assertEqual('user2@test.com', user.email)
-
- # NOTE(termie): update_enabled doesn't return anything, probably a bug
- client.users.update_enabled(user=user, enabled=False)
- user = client.users.get(user.id)
- self.assertFalse(user.enabled)
-
- self.assertRaises(client_exceptions.Unauthorized,
- self._client,
- username=test_username,
- password='password')
- client.users.update_enabled(user, True)
-
- user = client.users.update_password(user=user, password='password2')
-
- self._client(username=test_username,
- password='password2')
-
- user = client.users.update_tenant(user=user, tenant='bar')
- # TODO(ja): once keystonelight supports default tenant
- # when you login without specifying tenant, the
- # token should be scoped to tenant 'bar'
-
- client.users.delete(user.id)
- self.assertRaises(client_exceptions.NotFound, client.users.get,
- user.id)
-
- # Test creating a user with a tenant (auto-add to tenant)
- user2 = client.users.create(name=test_username,
- password='password',
- email='user1@test.com',
- tenant_id='bar')
- self.assertEqual(test_username, user2.name)
-
- def test_update_default_tenant_to_existing_value(self):
- client = self.get_client(admin=True)
-
- user = client.users.create(
- name=uuid.uuid4().hex,
- password=uuid.uuid4().hex,
- email=uuid.uuid4().hex,
- tenant_id=self.tenant_bar['id'])
-
- # attempting to update the tenant with the existing value should work
- user = client.users.update_tenant(
- user=user, tenant=self.tenant_bar['id'])
-
- def test_user_create_no_string_password(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.BadRequest,
- client.users.create,
- name='test_user',
- password=12345,
- email=uuid.uuid4().hex)
-
- def test_user_create_no_name(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.BadRequest,
- client.users.create,
- name="",
- password=uuid.uuid4().hex,
- email=uuid.uuid4().hex)
-
- def test_user_create_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.users.create,
- name=uuid.uuid4().hex,
- password=uuid.uuid4().hex,
- email=uuid.uuid4().hex,
- tenant_id=uuid.uuid4().hex)
-
- def test_user_get_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.users.get,
- user=uuid.uuid4().hex)
-
- def test_user_list_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.users.list,
- tenant_id=uuid.uuid4().hex)
-
- def test_user_update_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.users.update,
- user=uuid.uuid4().hex)
-
- def test_user_update_tenant(self):
- client = self.get_client(admin=True)
- tenant_id = uuid.uuid4().hex
- user = client.users.update(user=self.user_foo['id'],
- tenant_id=tenant_id)
- self.assertEqual(tenant_id, user.tenant_id)
-
- def test_user_update_password_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.users.update_password,
- user=uuid.uuid4().hex,
- password=uuid.uuid4().hex)
-
- def test_user_delete_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.users.delete,
- user=uuid.uuid4().hex)
-
- def test_user_list(self):
- client = self.get_client(admin=True)
- users = client.users.list()
- self.assertTrue(len(users) > 0)
- user = users[0]
- self.assertRaises(AttributeError, lambda: user.password)
-
- def test_user_get(self):
- client = self.get_client(admin=True)
- user = client.users.get(user=self.user_foo['id'])
- self.assertRaises(AttributeError, lambda: user.password)
-
- def test_role_get(self):
- client = self.get_client(admin=True)
- role = client.roles.get(role=self.role_admin['id'])
- self.assertEqual(self.role_admin['id'], role.id)
-
- def test_role_crud(self):
- test_role = 'new_role'
- client = self.get_client(admin=True)
- role = client.roles.create(name=test_role)
- self.assertEqual(test_role, role.name)
-
- role = client.roles.get(role=role.id)
- self.assertEqual(test_role, role.name)
-
- client.roles.delete(role=role.id)
-
- self.assertRaises(client_exceptions.NotFound,
- client.roles.delete,
- role=role.id)
- self.assertRaises(client_exceptions.NotFound,
- client.roles.get,
- role=role.id)
-
- def test_role_create_no_name(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.BadRequest,
- client.roles.create,
- name="")
-
- def test_role_create_member_role(self):
- # delete the member role so that we can recreate it
- client = self.get_client(admin=True)
- client.roles.delete(role=CONF.member_role_id)
-
- # deleting the member role revokes our token, so re-authenticate
- client = self.get_client(admin=True)
-
- # specify only the role name on creation
- role = client.roles.create(name=CONF.member_role_name)
-
- # the ID should be set as defined in CONF
- self.assertEqual(CONF.member_role_id, role.id)
-
- def test_role_get_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.roles.get,
- role=uuid.uuid4().hex)
-
- def test_role_delete_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.roles.delete,
- role=uuid.uuid4().hex)
-
- def test_role_list_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.roles.roles_for_user,
- user=uuid.uuid4().hex,
- tenant=uuid.uuid4().hex)
- self.assertRaises(client_exceptions.NotFound,
- client.roles.roles_for_user,
- user=self.user_foo['id'],
- tenant=uuid.uuid4().hex)
- self.assertRaises(client_exceptions.NotFound,
- client.roles.roles_for_user,
- user=uuid.uuid4().hex,
- tenant=self.tenant_bar['id'])
-
- def test_role_list(self):
- client = self.get_client(admin=True)
- roles = client.roles.list()
- # TODO(devcamcar): This assert should be more specific.
- self.assertTrue(len(roles) > 0)
-
- def test_service_crud(self):
- client = self.get_client(admin=True)
-
- service_name = uuid.uuid4().hex
- service_type = uuid.uuid4().hex
- service_desc = uuid.uuid4().hex
-
- # create & read
- service = client.services.create(name=service_name,
- service_type=service_type,
- description=service_desc)
- self.assertEqual(service_name, service.name)
- self.assertEqual(service_type, service.type)
- self.assertEqual(service_desc, service.description)
-
- service = client.services.get(id=service.id)
- self.assertEqual(service_name, service.name)
- self.assertEqual(service_type, service.type)
- self.assertEqual(service_desc, service.description)
-
- service = [x for x in client.services.list() if x.id == service.id][0]
- self.assertEqual(service_name, service.name)
- self.assertEqual(service_type, service.type)
- self.assertEqual(service_desc, service.description)
-
- # update is not supported in API v2...
-
- # delete & read
- client.services.delete(id=service.id)
- self.assertRaises(client_exceptions.NotFound,
- client.services.get,
- id=service.id)
- services = [x for x in client.services.list() if x.id == service.id]
- self.assertEqual(0, len(services))
-
- def test_service_delete_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.services.delete,
- id=uuid.uuid4().hex)
-
- def test_service_get_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.services.get,
- id=uuid.uuid4().hex)
-
- def test_endpoint_delete_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.endpoints.delete,
- id=uuid.uuid4().hex)
-
- def test_admin_requires_adminness(self):
- # FIXME(ja): this should be Unauthorized
- exception = client_exceptions.ClientException
-
- two = self.get_client(self.user_two, admin=True) # non-admin user
-
- # USER CRUD
- self.assertRaises(exception,
- two.users.list)
- self.assertRaises(exception,
- two.users.get,
- user=self.user_two['id'])
- self.assertRaises(exception,
- two.users.create,
- name='oops',
- password='password',
- email='oops@test.com')
- self.assertRaises(exception,
- two.users.delete,
- user=self.user_foo['id'])
-
- # TENANT CRUD
- self.assertRaises(exception,
- two.tenants.list)
- self.assertRaises(exception,
- two.tenants.get,
- tenant_id=self.tenant_bar['id'])
- self.assertRaises(exception,
- two.tenants.create,
- tenant_name='oops',
- description="shouldn't work!",
- enabled=True)
- self.assertRaises(exception,
- two.tenants.delete,
- tenant=self.tenant_baz['id'])
-
- # ROLE CRUD
- self.assertRaises(exception,
- two.roles.get,
- role=self.role_admin['id'])
- self.assertRaises(exception,
- two.roles.list)
- self.assertRaises(exception,
- two.roles.create,
- name='oops')
- self.assertRaises(exception,
- two.roles.delete,
- role=self.role_admin['id'])
-
- # TODO(ja): MEMBERSHIP CRUD
- # TODO(ja): determine what else todo
-
- def test_tenant_add_and_remove_user(self):
- client = self.get_client(admin=True)
- client.roles.add_user_role(tenant=self.tenant_bar['id'],
- user=self.user_two['id'],
- role=self.role_other['id'])
- user_refs = client.tenants.list_users(tenant=self.tenant_bar['id'])
- self.assertIn(self.user_two['id'], [x.id for x in user_refs])
- client.roles.remove_user_role(tenant=self.tenant_bar['id'],
- user=self.user_two['id'],
- role=self.role_other['id'])
- roles = client.roles.roles_for_user(user=self.user_foo['id'],
- tenant=self.tenant_bar['id'])
- self.assertNotIn(self.role_other['id'], roles)
- user_refs = client.tenants.list_users(tenant=self.tenant_bar['id'])
- self.assertNotIn(self.user_two['id'], [x.id for x in user_refs])
-
- def test_user_role_add_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.roles.add_user_role,
- tenant=uuid.uuid4().hex,
- user=self.user_foo['id'],
- role=self.role_member['id'])
- self.assertRaises(client_exceptions.NotFound,
- client.roles.add_user_role,
- tenant=self.tenant_baz['id'],
- user=self.user_foo['id'],
- role=uuid.uuid4().hex)
-
- def test_user_role_add_no_user(self):
- # If add_user_role and user doesn't exist, doesn't fail.
- client = self.get_client(admin=True)
- client.roles.add_user_role(tenant=self.tenant_baz['id'],
- user=uuid.uuid4().hex,
- role=self.role_member['id'])
-
- def test_user_role_remove_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.roles.remove_user_role,
- tenant=uuid.uuid4().hex,
- user=self.user_foo['id'],
- role=self.role_member['id'])
- self.assertRaises(client_exceptions.NotFound,
- client.roles.remove_user_role,
- tenant=self.tenant_baz['id'],
- user=uuid.uuid4().hex,
- role=self.role_member['id'])
- self.assertRaises(client_exceptions.NotFound,
- client.roles.remove_user_role,
- tenant=self.tenant_baz['id'],
- user=self.user_foo['id'],
- role=uuid.uuid4().hex)
- self.assertRaises(client_exceptions.NotFound,
- client.roles.remove_user_role,
- tenant=self.tenant_baz['id'],
- user=self.user_foo['id'],
- role=self.role_member['id'])
-
- def test_tenant_list_marker(self):
- client = self.get_client()
-
- # Add two arbitrary tenants to user for testing purposes
- for i in range(2):
- tenant_id = uuid.uuid4().hex
- tenant = {'name': 'tenant-%s' % tenant_id, 'id': tenant_id,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project(tenant_id, tenant)
- self.assignment_api.add_user_to_project(tenant_id,
- self.user_foo['id'])
-
- tenants = client.tenants.list()
- self.assertEqual(3, len(tenants))
-
- tenants_marker = client.tenants.list(marker=tenants[0].id)
- self.assertEqual(2, len(tenants_marker))
- self.assertEqual(tenants_marker[0].name, tenants[1].name)
- self.assertEqual(tenants_marker[1].name, tenants[2].name)
-
- def test_tenant_list_marker_not_found(self):
- client = self.get_client()
- self.assertRaises(client_exceptions.BadRequest,
- client.tenants.list, marker=uuid.uuid4().hex)
-
- def test_tenant_list_limit(self):
- client = self.get_client()
-
- # Add two arbitrary tenants to user for testing purposes
- for i in range(2):
- tenant_id = uuid.uuid4().hex
- tenant = {'name': 'tenant-%s' % tenant_id, 'id': tenant_id,
- 'domain_id': DEFAULT_DOMAIN_ID}
- self.resource_api.create_project(tenant_id, tenant)
- self.assignment_api.add_user_to_project(tenant_id,
- self.user_foo['id'])
-
- tenants = client.tenants.list()
- self.assertEqual(3, len(tenants))
-
- tenants_limited = client.tenants.list(limit=2)
- self.assertEqual(2, len(tenants_limited))
- self.assertEqual(tenants[0].name, tenants_limited[0].name)
- self.assertEqual(tenants[1].name, tenants_limited[1].name)
-
- def test_tenant_list_limit_bad_value(self):
- client = self.get_client()
- self.assertRaises(client_exceptions.BadRequest,
- client.tenants.list, limit='a')
- self.assertRaises(client_exceptions.BadRequest,
- client.tenants.list, limit=-1)
-
- def test_roles_get_by_user(self):
- client = self.get_client(admin=True)
- roles = client.roles.roles_for_user(user=self.user_foo['id'],
- tenant=self.tenant_bar['id'])
- self.assertTrue(len(roles) > 0)
-
- def test_user_can_update_passwd(self):
- client = self.get_client(self.user_two)
-
- token_id = client.auth_token
- new_password = uuid.uuid4().hex
-
- # TODO(derekh): Update to use keystoneclient when available
- class FakeResponse(object):
- def start_fake_response(self, status, headers):
- self.response_status = int(status.split(' ', 1)[0])
- self.response_headers = dict(headers)
- responseobject = FakeResponse()
-
- req = webob.Request.blank(
- '/v2.0/OS-KSCRUD/users/%s' % self.user_two['id'],
- headers={'X-Auth-Token': token_id})
- req.method = 'PATCH'
- req.body = ('{"user":{"password":"%s","original_password":"%s"}}' %
- (new_password, self.user_two['password']))
- self.public_server.application(req.environ,
- responseobject.start_fake_response)
-
- self.user_two['password'] = new_password
- self.get_client(self.user_two)
-
- def test_user_cannot_update_other_users_passwd(self):
- client = self.get_client(self.user_two)
-
- token_id = client.auth_token
- new_password = uuid.uuid4().hex
-
- # TODO(derekh): Update to use keystoneclient when available
- class FakeResponse(object):
- def start_fake_response(self, status, headers):
- self.response_status = int(status.split(' ', 1)[0])
- self.response_headers = dict(headers)
- responseobject = FakeResponse()
-
- req = webob.Request.blank(
- '/v2.0/OS-KSCRUD/users/%s' % self.user_foo['id'],
- headers={'X-Auth-Token': token_id})
- req.method = 'PATCH'
- req.body = ('{"user":{"password":"%s","original_password":"%s"}}' %
- (new_password, self.user_two['password']))
- self.public_server.application(req.environ,
- responseobject.start_fake_response)
- self.assertEqual(http_client.FORBIDDEN,
- responseobject.response_status)
-
- self.user_two['password'] = new_password
- self.assertRaises(client_exceptions.Unauthorized,
- self.get_client, self.user_two)
-
- def test_tokens_after_user_update_passwd(self):
- client = self.get_client(self.user_two)
-
- token_id = client.auth_token
- new_password = uuid.uuid4().hex
-
- # TODO(derekh): Update to use keystoneclient when available
- class FakeResponse(object):
- def start_fake_response(self, status, headers):
- self.response_status = int(status.split(' ', 1)[0])
- self.response_headers = dict(headers)
- responseobject = FakeResponse()
-
- req = webob.Request.blank(
- '/v2.0/OS-KSCRUD/users/%s' % self.user_two['id'],
- headers={'X-Auth-Token': token_id})
- req.method = 'PATCH'
- req.body = ('{"user":{"password":"%s","original_password":"%s"}}' %
- (new_password, self.user_two['password']))
-
- rv = self.public_server.application(
- req.environ,
- responseobject.start_fake_response)
- response_json = jsonutils.loads(rv.pop())
- new_token_id = response_json['access']['token']['id']
-
- self.assertRaises(client_exceptions.Unauthorized, client.tenants.list)
- client.auth_token = new_token_id
- client.tenants.list()
-
- def test_endpoint_crud(self):
- client = self.get_client(admin=True)
-
- service = client.services.create(name=uuid.uuid4().hex,
- service_type=uuid.uuid4().hex,
- description=uuid.uuid4().hex)
-
- endpoint_region = uuid.uuid4().hex
- invalid_service_id = uuid.uuid4().hex
- endpoint_publicurl = uuid.uuid4().hex
- endpoint_internalurl = uuid.uuid4().hex
- endpoint_adminurl = uuid.uuid4().hex
-
- # a non-existent service ID should trigger a 400
- self.assertRaises(client_exceptions.BadRequest,
- client.endpoints.create,
- region=endpoint_region,
- service_id=invalid_service_id,
- publicurl=endpoint_publicurl,
- adminurl=endpoint_adminurl,
- internalurl=endpoint_internalurl)
-
- endpoint = client.endpoints.create(region=endpoint_region,
- service_id=service.id,
- publicurl=endpoint_publicurl,
- adminurl=endpoint_adminurl,
- internalurl=endpoint_internalurl)
-
- self.assertEqual(endpoint_region, endpoint.region)
- self.assertEqual(service.id, endpoint.service_id)
- self.assertEqual(endpoint_publicurl, endpoint.publicurl)
- self.assertEqual(endpoint_internalurl, endpoint.internalurl)
- self.assertEqual(endpoint_adminurl, endpoint.adminurl)
-
- client.endpoints.delete(id=endpoint.id)
- self.assertRaises(client_exceptions.NotFound, client.endpoints.delete,
- id=endpoint.id)
-
- def _send_ec2_auth_request(self, credentials, client=None):
- if not client:
- client = self.default_client
- url = '%s/ec2tokens' % self.default_client.auth_url
- resp = client.session.request(
- url=url, method='POST',
- json={'credentials': credentials})
- return resp, resp.json()
-
- def _generate_default_user_ec2_credentials(self):
- cred = self. default_client.ec2.create(
- user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
- return self._generate_user_ec2_credentials(cred.access, cred.secret)
-
- def _generate_user_ec2_credentials(self, access, secret):
- signer = ec2_utils.Ec2Signer(secret)
- credentials = {'params': {'SignatureVersion': '2'},
- 'access': access,
- 'verb': 'GET',
- 'host': 'localhost',
- 'path': '/service/cloud'}
- signature = signer.generate(credentials)
- return credentials, signature
-
- def test_ec2_auth_success(self):
- credentials, signature = self._generate_default_user_ec2_credentials()
- credentials['signature'] = signature
- resp, token = self._send_ec2_auth_request(credentials)
- self.assertEqual(200, resp.status_code)
- self.assertIn('access', token)
-
- def test_ec2_auth_success_trust(self):
- # Add "other" role user_foo and create trust delegating it to user_two
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'],
- self.tenant_bar['id'],
- self.role_other['id'])
- trust_id = 'atrust123'
- trust = {'trustor_user_id': self.user_foo['id'],
- 'trustee_user_id': self.user_two['id'],
- 'project_id': self.tenant_bar['id'],
- 'impersonation': True}
- roles = [self.role_other]
- self.trust_api.create_trust(trust_id, trust, roles)
-
- # Create a client for user_two, scoped to the trust
- client = self.get_client(self.user_two)
- ret = client.authenticate(trust_id=trust_id,
- tenant_id=self.tenant_bar['id'])
- self.assertTrue(ret)
- self.assertTrue(client.auth_ref.trust_scoped)
- self.assertEqual(trust_id, client.auth_ref.trust_id)
-
- # Create an ec2 keypair using the trust client impersonating user_foo
- cred = client.ec2.create(user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
- credentials, signature = self._generate_user_ec2_credentials(
- cred.access, cred.secret)
- credentials['signature'] = signature
- resp, token = self._send_ec2_auth_request(credentials)
- self.assertEqual(200, resp.status_code)
- self.assertEqual(trust_id, token['access']['trust']['id'])
- # TODO(shardy) we really want to check the roles and trustee
- # but because of where the stubbing happens we don't seem to
- # hit the necessary code in controllers.py _authenticate_token
- # so although all is OK via a real request, it incorrect in
- # this test..
-
- def test_ec2_auth_failure(self):
- credentials, signature = self._generate_default_user_ec2_credentials()
- credentials['signature'] = uuid.uuid4().hex
- self.assertRaises(client_exceptions.Unauthorized,
- self._send_ec2_auth_request,
- credentials)
-
- def test_ec2_credential_crud(self):
- creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
- self.assertEqual([], creds)
-
- cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
- creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
- self.assertEqual(creds, [cred])
- got = self.default_client.ec2.get(user_id=self.user_foo['id'],
- access=cred.access)
- self.assertEqual(cred, got)
-
- self.default_client.ec2.delete(user_id=self.user_foo['id'],
- access=cred.access)
- creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
- self.assertEqual([], creds)
-
- def test_ec2_credential_crud_non_admin(self):
- na_client = self.get_client(self.user_two)
- creds = na_client.ec2.list(user_id=self.user_two['id'])
- self.assertEqual([], creds)
-
- cred = na_client.ec2.create(user_id=self.user_two['id'],
- tenant_id=self.tenant_baz['id'])
- creds = na_client.ec2.list(user_id=self.user_two['id'])
- self.assertEqual(creds, [cred])
- got = na_client.ec2.get(user_id=self.user_two['id'],
- access=cred.access)
- self.assertEqual(cred, got)
-
- na_client.ec2.delete(user_id=self.user_two['id'],
- access=cred.access)
- creds = na_client.ec2.list(user_id=self.user_two['id'])
- self.assertEqual([], creds)
-
- def test_ec2_list_credentials(self):
- cred_1 = self.default_client.ec2.create(
- user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
- cred_2 = self.default_client.ec2.create(
- user_id=self.user_foo['id'],
- tenant_id=self.tenant_service['id'])
- cred_3 = self.default_client.ec2.create(
- user_id=self.user_foo['id'],
- tenant_id=self.tenant_mtu['id'])
- two = self.get_client(self.user_two)
- cred_4 = two.ec2.create(user_id=self.user_two['id'],
- tenant_id=self.tenant_bar['id'])
- creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
- self.assertEqual(3, len(creds))
- self.assertEqual(sorted([cred_1, cred_2, cred_3],
- key=lambda x: x.access),
- sorted(creds, key=lambda x: x.access))
- self.assertNotIn(cred_4, creds)
-
- def test_ec2_credentials_create_404(self):
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.create,
- user_id=uuid.uuid4().hex,
- tenant_id=self.tenant_bar['id'])
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.create,
- user_id=self.user_foo['id'],
- tenant_id=uuid.uuid4().hex)
-
- def test_ec2_credentials_delete_404(self):
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.delete,
- user_id=uuid.uuid4().hex,
- access=uuid.uuid4().hex)
-
- def test_ec2_credentials_get_404(self):
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.get,
- user_id=uuid.uuid4().hex,
- access=uuid.uuid4().hex)
-
- def test_ec2_credentials_list_404(self):
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.list,
- user_id=uuid.uuid4().hex)
-
- def test_ec2_credentials_list_user_forbidden(self):
- two = self.get_client(self.user_two)
- self.assertRaises(client_exceptions.Forbidden, two.ec2.list,
- user_id=self.user_foo['id'])
-
- def test_ec2_credentials_get_user_forbidden(self):
- cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
-
- two = self.get_client(self.user_two)
- self.assertRaises(client_exceptions.Forbidden, two.ec2.get,
- user_id=self.user_foo['id'], access=cred.access)
-
- self.default_client.ec2.delete(user_id=self.user_foo['id'],
- access=cred.access)
-
- def test_ec2_credentials_delete_user_forbidden(self):
- cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
-
- two = self.get_client(self.user_two)
- self.assertRaises(client_exceptions.Forbidden, two.ec2.delete,
- user_id=self.user_foo['id'], access=cred.access)
-
- self.default_client.ec2.delete(user_id=self.user_foo['id'],
- access=cred.access)
-
- def test_endpoint_create_nonexistent_service(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.BadRequest,
- client.endpoints.create,
- region=uuid.uuid4().hex,
- service_id=uuid.uuid4().hex,
- publicurl=uuid.uuid4().hex,
- adminurl=uuid.uuid4().hex,
- internalurl=uuid.uuid4().hex)
-
- def test_policy_crud(self):
- # FIXME(dolph): this test was written prior to the v3 implementation of
- # the client and essentially refers to a non-existent
- # policy manager in the v2 client. this test needs to be
- # moved to a test suite running against the v3 api
- self.skipTest('Written prior to v3 client; needs refactor')
-
- client = self.get_client(admin=True)
-
- policy_blob = uuid.uuid4().hex
- policy_type = uuid.uuid4().hex
- service = client.services.create(
- name=uuid.uuid4().hex,
- service_type=uuid.uuid4().hex,
- description=uuid.uuid4().hex)
- endpoint = client.endpoints.create(
- service_id=service.id,
- region=uuid.uuid4().hex,
- adminurl=uuid.uuid4().hex,
- internalurl=uuid.uuid4().hex,
- publicurl=uuid.uuid4().hex)
-
- # create
- policy = client.policies.create(
- blob=policy_blob,
- type=policy_type,
- endpoint=endpoint.id)
- self.assertEqual(policy_blob, policy.policy)
- self.assertEqual(policy_type, policy.type)
- self.assertEqual(endpoint.id, policy.endpoint_id)
-
- policy = client.policies.get(policy=policy.id)
- self.assertEqual(policy_blob, policy.policy)
- self.assertEqual(policy_type, policy.type)
- self.assertEqual(endpoint.id, policy.endpoint_id)
-
- endpoints = [x for x in client.endpoints.list() if x.id == endpoint.id]
- endpoint = endpoints[0]
- self.assertEqual(policy_blob, policy.policy)
- self.assertEqual(policy_type, policy.type)
- self.assertEqual(endpoint.id, policy.endpoint_id)
-
- # update
- policy_blob = uuid.uuid4().hex
- policy_type = uuid.uuid4().hex
- endpoint = client.endpoints.create(
- service_id=service.id,
- region=uuid.uuid4().hex,
- adminurl=uuid.uuid4().hex,
- internalurl=uuid.uuid4().hex,
- publicurl=uuid.uuid4().hex)
-
- policy = client.policies.update(
- policy=policy.id,
- blob=policy_blob,
- type=policy_type,
- endpoint=endpoint.id)
-
- policy = client.policies.get(policy=policy.id)
- self.assertEqual(policy_blob, policy.policy)
- self.assertEqual(policy_type, policy.type)
- self.assertEqual(endpoint.id, policy.endpoint_id)
-
- # delete
- client.policies.delete(policy=policy.id)
- self.assertRaises(
- client_exceptions.NotFound,
- client.policies.get,
- policy=policy.id)
- policies = [x for x in client.policies.list() if x.id == policy.id]
- self.assertEqual(0, len(policies))
diff --git a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py
deleted file mode 100644
index 0fb60fd9..00000000
--- a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from keystoneclient.contrib.ec2 import utils as ec2_utils
-from keystoneclient import exceptions as client_exceptions
-
-from keystone.tests import unit as tests
-from keystone.tests.unit import test_v2_keystoneclient
-
-
-class ClientDrivenSqlTestCase(test_v2_keystoneclient.ClientDrivenTestCase):
- def config_files(self):
- config_files = super(ClientDrivenSqlTestCase, self).config_files()
- config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
- def setUp(self):
- super(ClientDrivenSqlTestCase, self).setUp()
- self.default_client = self.get_client()
- self.addCleanup(self.cleanup_instance('default_client'))
-
- def test_endpoint_crud(self):
- client = self.get_client(admin=True)
-
- service = client.services.create(name=uuid.uuid4().hex,
- service_type=uuid.uuid4().hex,
- description=uuid.uuid4().hex)
-
- endpoint_region = uuid.uuid4().hex
- invalid_service_id = uuid.uuid4().hex
- endpoint_publicurl = uuid.uuid4().hex
- endpoint_internalurl = uuid.uuid4().hex
- endpoint_adminurl = uuid.uuid4().hex
-
- # a non-existent service ID should trigger a 400
- self.assertRaises(client_exceptions.BadRequest,
- client.endpoints.create,
- region=endpoint_region,
- service_id=invalid_service_id,
- publicurl=endpoint_publicurl,
- adminurl=endpoint_adminurl,
- internalurl=endpoint_internalurl)
-
- endpoint = client.endpoints.create(region=endpoint_region,
- service_id=service.id,
- publicurl=endpoint_publicurl,
- adminurl=endpoint_adminurl,
- internalurl=endpoint_internalurl)
-
- self.assertEqual(endpoint_region, endpoint.region)
- self.assertEqual(service.id, endpoint.service_id)
- self.assertEqual(endpoint_publicurl, endpoint.publicurl)
- self.assertEqual(endpoint_internalurl, endpoint.internalurl)
- self.assertEqual(endpoint_adminurl, endpoint.adminurl)
-
- client.endpoints.delete(id=endpoint.id)
- self.assertRaises(client_exceptions.NotFound, client.endpoints.delete,
- id=endpoint.id)
-
- def _send_ec2_auth_request(self, credentials, client=None):
- if not client:
- client = self.default_client
- url = '%s/ec2tokens' % self.default_client.auth_url
- (resp, token) = client.request(
- url=url, method='POST',
- body={'credentials': credentials})
- return resp, token
-
- def _generate_default_user_ec2_credentials(self):
- cred = self. default_client.ec2.create(
- user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
- return self._generate_user_ec2_credentials(cred.access, cred.secret)
-
- def _generate_user_ec2_credentials(self, access, secret):
- signer = ec2_utils.Ec2Signer(secret)
- credentials = {'params': {'SignatureVersion': '2'},
- 'access': access,
- 'verb': 'GET',
- 'host': 'localhost',
- 'path': '/service/cloud'}
- signature = signer.generate(credentials)
- return credentials, signature
-
- def test_ec2_auth_success(self):
- credentials, signature = self._generate_default_user_ec2_credentials()
- credentials['signature'] = signature
- resp, token = self._send_ec2_auth_request(credentials)
- self.assertEqual(200, resp.status_code)
- self.assertIn('access', token)
-
- def test_ec2_auth_success_trust(self):
- # Add "other" role user_foo and create trust delegating it to user_two
- self.assignment_api.add_role_to_user_and_project(
- self.user_foo['id'],
- self.tenant_bar['id'],
- self.role_other['id'])
- trust_id = 'atrust123'
- trust = {'trustor_user_id': self.user_foo['id'],
- 'trustee_user_id': self.user_two['id'],
- 'project_id': self.tenant_bar['id'],
- 'impersonation': True}
- roles = [self.role_other]
- self.trust_api.create_trust(trust_id, trust, roles)
-
- # Create a client for user_two, scoped to the trust
- client = self.get_client(self.user_two)
- ret = client.authenticate(trust_id=trust_id,
- tenant_id=self.tenant_bar['id'])
- self.assertTrue(ret)
- self.assertTrue(client.auth_ref.trust_scoped)
- self.assertEqual(trust_id, client.auth_ref.trust_id)
-
- # Create an ec2 keypair using the trust client impersonating user_foo
- cred = client.ec2.create(user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
- credentials, signature = self._generate_user_ec2_credentials(
- cred.access, cred.secret)
- credentials['signature'] = signature
- resp, token = self._send_ec2_auth_request(credentials)
- self.assertEqual(200, resp.status_code)
- self.assertEqual(trust_id, token['access']['trust']['id'])
- # TODO(shardy) we really want to check the roles and trustee
- # but because of where the stubbing happens we don't seem to
- # hit the necessary code in controllers.py _authenticate_token
- # so although all is OK via a real request, it incorrect in
- # this test..
-
- def test_ec2_auth_failure(self):
- credentials, signature = self._generate_default_user_ec2_credentials()
- credentials['signature'] = uuid.uuid4().hex
- self.assertRaises(client_exceptions.Unauthorized,
- self._send_ec2_auth_request,
- credentials)
-
- def test_ec2_credential_crud(self):
- creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
- self.assertEqual([], creds)
-
- cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
- creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
- self.assertEqual(creds, [cred])
- got = self.default_client.ec2.get(user_id=self.user_foo['id'],
- access=cred.access)
- self.assertEqual(cred, got)
-
- self.default_client.ec2.delete(user_id=self.user_foo['id'],
- access=cred.access)
- creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
- self.assertEqual([], creds)
-
- def test_ec2_credential_crud_non_admin(self):
- na_client = self.get_client(self.user_two)
- creds = na_client.ec2.list(user_id=self.user_two['id'])
- self.assertEqual([], creds)
-
- cred = na_client.ec2.create(user_id=self.user_two['id'],
- tenant_id=self.tenant_baz['id'])
- creds = na_client.ec2.list(user_id=self.user_two['id'])
- self.assertEqual(creds, [cred])
- got = na_client.ec2.get(user_id=self.user_two['id'],
- access=cred.access)
- self.assertEqual(cred, got)
-
- na_client.ec2.delete(user_id=self.user_two['id'],
- access=cred.access)
- creds = na_client.ec2.list(user_id=self.user_two['id'])
- self.assertEqual([], creds)
-
- def test_ec2_list_credentials(self):
- cred_1 = self.default_client.ec2.create(
- user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
- cred_2 = self.default_client.ec2.create(
- user_id=self.user_foo['id'],
- tenant_id=self.tenant_service['id'])
- cred_3 = self.default_client.ec2.create(
- user_id=self.user_foo['id'],
- tenant_id=self.tenant_mtu['id'])
- two = self.get_client(self.user_two)
- cred_4 = two.ec2.create(user_id=self.user_two['id'],
- tenant_id=self.tenant_bar['id'])
- creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
- self.assertEqual(3, len(creds))
- self.assertEqual(sorted([cred_1, cred_2, cred_3],
- key=lambda x: x.access),
- sorted(creds, key=lambda x: x.access))
- self.assertNotIn(cred_4, creds)
-
- def test_ec2_credentials_create_404(self):
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.create,
- user_id=uuid.uuid4().hex,
- tenant_id=self.tenant_bar['id'])
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.create,
- user_id=self.user_foo['id'],
- tenant_id=uuid.uuid4().hex)
-
- def test_ec2_credentials_delete_404(self):
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.delete,
- user_id=uuid.uuid4().hex,
- access=uuid.uuid4().hex)
-
- def test_ec2_credentials_get_404(self):
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.get,
- user_id=uuid.uuid4().hex,
- access=uuid.uuid4().hex)
-
- def test_ec2_credentials_list_404(self):
- self.assertRaises(client_exceptions.NotFound,
- self.default_client.ec2.list,
- user_id=uuid.uuid4().hex)
-
- def test_ec2_credentials_list_user_forbidden(self):
- two = self.get_client(self.user_two)
- self.assertRaises(client_exceptions.Forbidden, two.ec2.list,
- user_id=self.user_foo['id'])
-
- def test_ec2_credentials_get_user_forbidden(self):
- cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
-
- two = self.get_client(self.user_two)
- self.assertRaises(client_exceptions.Forbidden, two.ec2.get,
- user_id=self.user_foo['id'], access=cred.access)
-
- self.default_client.ec2.delete(user_id=self.user_foo['id'],
- access=cred.access)
-
- def test_ec2_credentials_delete_user_forbidden(self):
- cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
- tenant_id=self.tenant_bar['id'])
-
- two = self.get_client(self.user_two)
- self.assertRaises(client_exceptions.Forbidden, two.ec2.delete,
- user_id=self.user_foo['id'], access=cred.access)
-
- self.default_client.ec2.delete(user_id=self.user_foo['id'],
- access=cred.access)
-
- def test_endpoint_create_nonexistent_service(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.BadRequest,
- client.endpoints.create,
- region=uuid.uuid4().hex,
- service_id=uuid.uuid4().hex,
- publicurl=uuid.uuid4().hex,
- adminurl=uuid.uuid4().hex,
- internalurl=uuid.uuid4().hex)
-
- def test_endpoint_delete_404(self):
- client = self.get_client(admin=True)
- self.assertRaises(client_exceptions.NotFound,
- client.endpoints.delete,
- id=uuid.uuid4().hex)
-
- def test_policy_crud(self):
- # FIXME(dolph): this test was written prior to the v3 implementation of
- # the client and essentially refers to a non-existent
- # policy manager in the v2 client. this test needs to be
- # moved to a test suite running against the v3 api
- self.skipTest('Written prior to v3 client; needs refactor')
-
- client = self.get_client(admin=True)
-
- policy_blob = uuid.uuid4().hex
- policy_type = uuid.uuid4().hex
- service = client.services.create(
- name=uuid.uuid4().hex,
- service_type=uuid.uuid4().hex,
- description=uuid.uuid4().hex)
- endpoint = client.endpoints.create(
- service_id=service.id,
- region=uuid.uuid4().hex,
- adminurl=uuid.uuid4().hex,
- internalurl=uuid.uuid4().hex,
- publicurl=uuid.uuid4().hex)
-
- # create
- policy = client.policies.create(
- blob=policy_blob,
- type=policy_type,
- endpoint=endpoint.id)
- self.assertEqual(policy_blob, policy.policy)
- self.assertEqual(policy_type, policy.type)
- self.assertEqual(endpoint.id, policy.endpoint_id)
-
- policy = client.policies.get(policy=policy.id)
- self.assertEqual(policy_blob, policy.policy)
- self.assertEqual(policy_type, policy.type)
- self.assertEqual(endpoint.id, policy.endpoint_id)
-
- endpoints = [x for x in client.endpoints.list() if x.id == endpoint.id]
- endpoint = endpoints[0]
- self.assertEqual(policy_blob, policy.policy)
- self.assertEqual(policy_type, policy.type)
- self.assertEqual(endpoint.id, policy.endpoint_id)
-
- # update
- policy_blob = uuid.uuid4().hex
- policy_type = uuid.uuid4().hex
- endpoint = client.endpoints.create(
- service_id=service.id,
- region=uuid.uuid4().hex,
- adminurl=uuid.uuid4().hex,
- internalurl=uuid.uuid4().hex,
- publicurl=uuid.uuid4().hex)
-
- policy = client.policies.update(
- policy=policy.id,
- blob=policy_blob,
- type=policy_type,
- endpoint=endpoint.id)
-
- policy = client.policies.get(policy=policy.id)
- self.assertEqual(policy_blob, policy.policy)
- self.assertEqual(policy_type, policy.type)
- self.assertEqual(endpoint.id, policy.endpoint_id)
-
- # delete
- client.policies.delete(policy=policy.id)
- self.assertRaises(
- client_exceptions.NotFound,
- client.policies.get,
- policy=policy.id)
- policies = [x for x in client.policies.list() if x.id == policy.id]
- self.assertEqual(0, len(policies))
diff --git a/keystone-moon/keystone/tests/unit/test_v3.py b/keystone-moon/keystone/tests/unit/test_v3.py
deleted file mode 100644
index 216d8c79..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3.py
+++ /dev/null
@@ -1,1640 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-import mock
-from oslo_config import cfg
-import oslo_context.context
-from oslo_serialization import jsonutils
-from oslo_utils import timeutils
-from six.moves import http_client
-from testtools import matchers
-import webtest
-
-from keystone import auth
-from keystone.common import authorization
-from keystone.common import cache
-from keystone.common.validation import validators
-from keystone import exception
-from keystone import middleware
-from keystone.middleware import auth as middleware_auth
-from keystone.tests.common import auth as common_auth
-from keystone.tests import unit
-from keystone.tests.unit import rest
-
-
-CONF = cfg.CONF
-DEFAULT_DOMAIN_ID = 'default'
-
-TIME_FORMAT = unit.TIME_FORMAT
-
-
-class AuthTestMixin(object):
- """To hold auth building helper functions."""
-
- def build_auth_scope(self, project_id=None, project_name=None,
- project_domain_id=None, project_domain_name=None,
- domain_id=None, domain_name=None, trust_id=None,
- unscoped=None):
- scope_data = {}
- if unscoped:
- scope_data['unscoped'] = {}
- if project_id or project_name:
- scope_data['project'] = {}
- if project_id:
- scope_data['project']['id'] = project_id
- else:
- scope_data['project']['name'] = project_name
- if project_domain_id or project_domain_name:
- project_domain_json = {}
- if project_domain_id:
- project_domain_json['id'] = project_domain_id
- else:
- project_domain_json['name'] = project_domain_name
- scope_data['project']['domain'] = project_domain_json
- if domain_id or domain_name:
- scope_data['domain'] = {}
- if domain_id:
- scope_data['domain']['id'] = domain_id
- else:
- scope_data['domain']['name'] = domain_name
- if trust_id:
- scope_data['OS-TRUST:trust'] = {}
- scope_data['OS-TRUST:trust']['id'] = trust_id
- return scope_data
-
- def build_password_auth(self, user_id=None, username=None,
- user_domain_id=None, user_domain_name=None,
- password=None):
- password_data = {'user': {}}
- if user_id:
- password_data['user']['id'] = user_id
- else:
- password_data['user']['name'] = username
- if user_domain_id or user_domain_name:
- password_data['user']['domain'] = {}
- if user_domain_id:
- password_data['user']['domain']['id'] = user_domain_id
- else:
- password_data['user']['domain']['name'] = user_domain_name
- password_data['user']['password'] = password
- return password_data
-
- def build_token_auth(self, token):
- return {'id': token}
-
- def build_authentication_request(self, token=None, user_id=None,
- username=None, user_domain_id=None,
- user_domain_name=None, password=None,
- kerberos=False, **kwargs):
- """Build auth dictionary.
-
- It will create an auth dictionary based on all the arguments
- that it receives.
- """
- auth_data = {}
- auth_data['identity'] = {'methods': []}
- if kerberos:
- auth_data['identity']['methods'].append('kerberos')
- auth_data['identity']['kerberos'] = {}
- if token:
- auth_data['identity']['methods'].append('token')
- auth_data['identity']['token'] = self.build_token_auth(token)
- if user_id or username:
- auth_data['identity']['methods'].append('password')
- auth_data['identity']['password'] = self.build_password_auth(
- user_id, username, user_domain_id, user_domain_name, password)
- if kwargs:
- auth_data['scope'] = self.build_auth_scope(**kwargs)
- return {'auth': auth_data}
-
-
-class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
- common_auth.AuthTestMixin):
-
- def generate_token_schema(self, domain_scoped=False, project_scoped=False):
- """Return a dictionary of token properties to validate against."""
- properties = {
- 'audit_ids': {
- 'type': 'array',
- 'items': {
- 'type': 'string',
- },
- 'minItems': 1,
- 'maxItems': 2,
- },
- 'bind': {
- 'type': 'object',
- 'properties': {
- 'kerberos': {
- 'type': 'string',
- },
- },
- 'required': ['kerberos'],
- 'additionalProperties': False,
- },
- 'expires_at': {'type': 'string'},
- 'issued_at': {'type': 'string'},
- 'methods': {
- 'type': 'array',
- 'items': {
- 'type': 'string',
- },
- },
- 'user': {
- 'type': 'object',
- 'required': ['id', 'name', 'domain'],
- 'properties': {
- 'id': {'type': 'string'},
- 'name': {'type': 'string'},
- 'domain': {
- 'type': 'object',
- 'properties': {
- 'id': {'type': 'string'},
- 'name': {'type': 'string'}
- },
- 'required': ['id', 'name'],
- 'additonalProperties': False,
- }
- },
- 'additionalProperties': False,
- }
- }
-
- if domain_scoped:
- properties['catalog'] = {'type': 'array'}
- properties['roles'] = {
- 'type': 'array',
- 'items': {
- 'type': 'object',
- 'properties': {
- 'id': {'type': 'string', },
- 'name': {'type': 'string', },
- },
- 'required': ['id', 'name', ],
- 'additionalProperties': False,
- },
- 'minItems': 1,
- }
- properties['domain'] = {
- 'domain': {
- 'type': 'object',
- 'required': ['id', 'name'],
- 'properties': {
- 'id': {'type': 'string'},
- 'name': {'type': 'string'}
- },
- 'additionalProperties': False
- }
- }
- elif project_scoped:
- properties['is_admin_project'] = {'type': 'boolean'}
- properties['catalog'] = {'type': 'array'}
- properties['roles'] = {'type': 'array'}
- properties['project'] = {
- 'type': ['object'],
- 'required': ['id', 'name', 'domain'],
- 'properties': {
- 'id': {'type': 'string'},
- 'name': {'type': 'string'},
- 'domain': {
- 'type': ['object'],
- 'required': ['id', 'name'],
- 'properties': {
- 'id': {'type': 'string'},
- 'name': {'type': 'string'}
- },
- 'additionalProperties': False
- }
- },
- 'additionalProperties': False
- }
-
- schema = {
- 'type': 'object',
- 'properties': properties,
- 'required': ['audit_ids', 'expires_at', 'issued_at', 'methods',
- 'user'],
- 'optional': ['bind'],
- 'additionalProperties': False
- }
-
- if domain_scoped:
- schema['required'].extend(['domain', 'roles'])
- schema['optional'].append('catalog')
- elif project_scoped:
- schema['required'].append('project')
- schema['optional'].append('bind')
- schema['optional'].append('catalog')
- schema['optional'].append('OS-TRUST:trust')
- schema['optional'].append('is_admin_project')
-
- return schema
-
- def config_files(self):
- config_files = super(RestfulTestCase, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
- return config_files
-
- def get_extensions(self):
- extensions = set(['revoke'])
- if hasattr(self, 'EXTENSION_NAME'):
- extensions.add(self.EXTENSION_NAME)
- return extensions
-
- def generate_paste_config(self):
- new_paste_file = None
- try:
- new_paste_file = unit.generate_paste_config(self.EXTENSION_TO_ADD)
- except AttributeError:
- # no need to report this error here, as most tests will not have
- # EXTENSION_TO_ADD defined.
- pass
- finally:
- return new_paste_file
-
- def remove_generated_paste_config(self):
- try:
- unit.remove_generated_paste_config(self.EXTENSION_TO_ADD)
- except AttributeError:
- pass
-
- def setUp(self, app_conf='keystone'):
- """Setup for v3 Restful Test Cases."""
- new_paste_file = self.generate_paste_config()
- self.addCleanup(self.remove_generated_paste_config)
- if new_paste_file:
- app_conf = 'config:%s' % (new_paste_file)
-
- super(RestfulTestCase, self).setUp(app_conf=app_conf)
-
- self.empty_context = {'environment': {}}
-
- def load_backends(self):
- # ensure the cache region instance is setup
- cache.configure_cache()
-
- super(RestfulTestCase, self).load_backends()
-
- def load_fixtures(self, fixtures):
- self.load_sample_data()
-
- def _populate_default_domain(self):
- if CONF.database.connection == unit.IN_MEM_DB_CONN_STRING:
- # NOTE(morganfainberg): If an in-memory db is being used, be sure
- # to populate the default domain, this is typically done by
- # a migration, but the in-mem db uses model definitions to create
- # the schema (no migrations are run).
- try:
- self.resource_api.get_domain(DEFAULT_DOMAIN_ID)
- except exception.DomainNotFound:
- domain = unit.new_domain_ref(
- description=(u'The default domain'),
- id=DEFAULT_DOMAIN_ID,
- name=u'Default')
- self.resource_api.create_domain(DEFAULT_DOMAIN_ID, domain)
-
- def load_sample_data(self):
- self._populate_default_domain()
- self.domain = unit.new_domain_ref()
- self.domain_id = self.domain['id']
- self.resource_api.create_domain(self.domain_id, self.domain)
-
- self.project = unit.new_project_ref(domain_id=self.domain_id)
- self.project_id = self.project['id']
- self.project = self.resource_api.create_project(self.project_id,
- self.project)
-
- self.user = unit.create_user(self.identity_api,
- domain_id=self.domain_id)
- self.user_id = self.user['id']
-
- self.default_domain_project_id = uuid.uuid4().hex
- self.default_domain_project = unit.new_project_ref(
- domain_id=DEFAULT_DOMAIN_ID)
- self.default_domain_project['id'] = self.default_domain_project_id
- self.resource_api.create_project(self.default_domain_project_id,
- self.default_domain_project)
-
- self.default_domain_user = unit.create_user(
- self.identity_api,
- domain_id=DEFAULT_DOMAIN_ID)
- self.default_domain_user_id = self.default_domain_user['id']
-
- # create & grant policy.json's default role for admin_required
- self.role = unit.new_role_ref(name='admin')
- self.role_id = self.role['id']
- self.role_api.create_role(self.role_id, self.role)
- self.assignment_api.add_role_to_user_and_project(
- self.user_id, self.project_id, self.role_id)
- self.assignment_api.add_role_to_user_and_project(
- self.default_domain_user_id, self.default_domain_project_id,
- self.role_id)
- self.assignment_api.add_role_to_user_and_project(
- self.default_domain_user_id, self.project_id,
- self.role_id)
-
- # Create "req_admin" user for simulating a real user instead of the
- # admin_token_auth middleware
- self.user_reqadmin = unit.create_user(self.identity_api,
- DEFAULT_DOMAIN_ID)
- self.assignment_api.add_role_to_user_and_project(
- self.user_reqadmin['id'],
- self.default_domain_project_id,
- self.role_id)
-
- self.region = unit.new_region_ref()
- self.region_id = self.region['id']
- self.catalog_api.create_region(self.region)
-
- self.service = unit.new_service_ref()
- self.service_id = self.service['id']
- self.catalog_api.create_service(self.service_id, self.service.copy())
-
- self.endpoint = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id)
- self.endpoint_id = self.endpoint['id']
- self.catalog_api.create_endpoint(self.endpoint_id,
- self.endpoint.copy())
- # The server adds 'enabled' and defaults to True.
- self.endpoint['enabled'] = True
-
- def create_new_default_project_for_user(self, user_id, domain_id,
- enable_project=True):
- ref = unit.new_project_ref(domain_id=domain_id, enabled=enable_project)
- r = self.post('/projects', body={'project': ref})
- project = self.assertValidProjectResponse(r, ref)
- # set the user's preferred project
- body = {'user': {'default_project_id': project['id']}}
- r = self.patch('/users/%(user_id)s' % {
- 'user_id': user_id},
- body=body)
- self.assertValidUserResponse(r)
-
- return project
-
- def get_admin_token(self):
- """Convenience method so that we can test authenticated requests."""
- r = self.admin_request(
- method='POST',
- path='/v3/auth/tokens',
- body={
- 'auth': {
- 'identity': {
- 'methods': ['password'],
- 'password': {
- 'user': {
- 'name': self.user_reqadmin['name'],
- 'password': self.user_reqadmin['password'],
- 'domain': {
- 'id': self.user_reqadmin['domain_id']
- }
- }
- }
- },
- 'scope': {
- 'project': {
- 'id': self.default_domain_project_id,
- }
- }
- }
- })
- return r.headers.get('X-Subject-Token')
-
- def get_unscoped_token(self):
- """Convenience method so that we can test authenticated requests."""
- r = self.admin_request(
- method='POST',
- path='/v3/auth/tokens',
- body={
- 'auth': {
- 'identity': {
- 'methods': ['password'],
- 'password': {
- 'user': {
- 'name': self.user['name'],
- 'password': self.user['password'],
- 'domain': {
- 'id': self.user['domain_id']
- }
- }
- }
- }
- }
- })
- return r.headers.get('X-Subject-Token')
-
- def get_scoped_token(self):
- """Convenience method so that we can test authenticated requests."""
- r = self.admin_request(
- method='POST',
- path='/v3/auth/tokens',
- body={
- 'auth': {
- 'identity': {
- 'methods': ['password'],
- 'password': {
- 'user': {
- 'name': self.user['name'],
- 'password': self.user['password'],
- 'domain': {
- 'id': self.user['domain_id']
- }
- }
- }
- },
- 'scope': {
- 'project': {
- 'id': self.project['id'],
- }
- }
- }
- })
- return r.headers.get('X-Subject-Token')
-
- def get_domain_scoped_token(self):
- """Convenience method for requesting domain scoped token."""
- r = self.admin_request(
- method='POST',
- path='/v3/auth/tokens',
- body={
- 'auth': {
- 'identity': {
- 'methods': ['password'],
- 'password': {
- 'user': {
- 'name': self.user['name'],
- 'password': self.user['password'],
- 'domain': {
- 'id': self.user['domain_id']
- }
- }
- }
- },
- 'scope': {
- 'domain': {
- 'id': self.domain['id'],
- }
- }
- }
- })
- return r.headers.get('X-Subject-Token')
-
- def get_requested_token(self, auth):
- """Request the specific token we want."""
- r = self.v3_create_token(auth)
- return r.headers.get('X-Subject-Token')
-
- def v3_create_token(self, auth, expected_status=http_client.CREATED):
- return self.admin_request(method='POST',
- path='/v3/auth/tokens',
- body=auth,
- expected_status=expected_status)
-
- def v3_noauth_request(self, path, **kwargs):
- # request does not require auth token header
- path = '/v3' + path
- return self.admin_request(path=path, **kwargs)
-
- def v3_request(self, path, **kwargs):
- # check to see if caller requires token for the API call.
- if kwargs.pop('noauth', None):
- return self.v3_noauth_request(path, **kwargs)
-
- # Check if the caller has passed in auth details for
- # use in requesting the token
- auth_arg = kwargs.pop('auth', None)
- if auth_arg:
- token = self.get_requested_token(auth_arg)
- else:
- token = kwargs.pop('token', None)
- if not token:
- token = self.get_scoped_token()
- path = '/v3' + path
-
- return self.admin_request(path=path, token=token, **kwargs)
-
- def get(self, path, expected_status=http_client.OK, **kwargs):
- return self.v3_request(path, method='GET',
- expected_status=expected_status, **kwargs)
-
- def head(self, path, expected_status=http_client.NO_CONTENT, **kwargs):
- r = self.v3_request(path, method='HEAD',
- expected_status=expected_status, **kwargs)
- self.assertEqual(b'', r.body)
- return r
-
- def post(self, path, expected_status=http_client.CREATED, **kwargs):
- return self.v3_request(path, method='POST',
- expected_status=expected_status, **kwargs)
-
- def put(self, path, expected_status=http_client.NO_CONTENT, **kwargs):
- return self.v3_request(path, method='PUT',
- expected_status=expected_status, **kwargs)
-
- def patch(self, path, expected_status=http_client.OK, **kwargs):
- return self.v3_request(path, method='PATCH',
- expected_status=expected_status, **kwargs)
-
- def delete(self, path, expected_status=http_client.NO_CONTENT, **kwargs):
- return self.v3_request(path, method='DELETE',
- expected_status=expected_status, **kwargs)
-
- def assertValidErrorResponse(self, r):
- resp = r.result
- self.assertIsNotNone(resp.get('error'))
- self.assertIsNotNone(resp['error'].get('code'))
- self.assertIsNotNone(resp['error'].get('title'))
- self.assertIsNotNone(resp['error'].get('message'))
- self.assertEqual(int(resp['error']['code']), r.status_code)
-
- def assertValidListLinks(self, links, resource_url=None):
- self.assertIsNotNone(links)
- self.assertIsNotNone(links.get('self'))
- self.assertThat(links['self'], matchers.StartsWith('http://localhost'))
-
- if resource_url:
- self.assertThat(links['self'], matchers.EndsWith(resource_url))
-
- self.assertIn('next', links)
- if links['next'] is not None:
- self.assertThat(links['next'],
- matchers.StartsWith('http://localhost'))
-
- self.assertIn('previous', links)
- if links['previous'] is not None:
- self.assertThat(links['previous'],
- matchers.StartsWith('http://localhost'))
-
- def assertValidListResponse(self, resp, key, entity_validator, ref=None,
- expected_length=None, keys_to_check=None,
- resource_url=None):
- """Make assertions common to all API list responses.
-
- If a reference is provided, it's ID will be searched for in the
- response, and asserted to be equal.
-
- """
- entities = resp.result.get(key)
- self.assertIsNotNone(entities)
-
- if expected_length is not None:
- self.assertEqual(expected_length, len(entities))
- elif ref is not None:
- # we're at least expecting the ref
- self.assertNotEmpty(entities)
-
- # collections should have relational links
- self.assertValidListLinks(resp.result.get('links'),
- resource_url=resource_url)
-
- for entity in entities:
- self.assertIsNotNone(entity)
- self.assertValidEntity(entity, keys_to_check=keys_to_check)
- entity_validator(entity)
- if ref:
- entity = [x for x in entities if x['id'] == ref['id']][0]
- self.assertValidEntity(entity, ref=ref,
- keys_to_check=keys_to_check)
- entity_validator(entity, ref)
- return entities
-
- def assertValidResponse(self, resp, key, entity_validator, *args,
- **kwargs):
- """Make assertions common to all API responses."""
- entity = resp.result.get(key)
- self.assertIsNotNone(entity)
- keys = kwargs.pop('keys_to_check', None)
- self.assertValidEntity(entity, keys_to_check=keys, *args, **kwargs)
- entity_validator(entity, *args, **kwargs)
- return entity
-
- def assertValidEntity(self, entity, ref=None, keys_to_check=None):
- """Make assertions common to all API entities.
-
- If a reference is provided, the entity will also be compared against
- the reference.
- """
- if keys_to_check is not None:
- keys = keys_to_check
- else:
- keys = ['name', 'description', 'enabled']
-
- for k in ['id'] + keys:
- msg = '%s unexpectedly None in %s' % (k, entity)
- self.assertIsNotNone(entity.get(k), msg)
-
- self.assertIsNotNone(entity.get('links'))
- self.assertIsNotNone(entity['links'].get('self'))
- self.assertThat(entity['links']['self'],
- matchers.StartsWith('http://localhost'))
- self.assertIn(entity['id'], entity['links']['self'])
-
- if ref:
- for k in keys:
- msg = '%s not equal: %s != %s' % (k, ref[k], entity[k])
- self.assertEqual(ref[k], entity[k])
-
- return entity
-
- # auth validation
-
- def assertValidISO8601ExtendedFormatDatetime(self, dt):
- try:
- return timeutils.parse_strtime(dt, fmt=TIME_FORMAT)
- except Exception:
- msg = '%s is not a valid ISO 8601 extended format date time.' % dt
- raise AssertionError(msg)
-
- def assertValidTokenResponse(self, r, user=None):
- self.assertTrue(r.headers.get('X-Subject-Token'))
- token = r.result['token']
-
- self.assertIsNotNone(token.get('expires_at'))
- expires_at = self.assertValidISO8601ExtendedFormatDatetime(
- token['expires_at'])
- self.assertIsNotNone(token.get('issued_at'))
- issued_at = self.assertValidISO8601ExtendedFormatDatetime(
- token['issued_at'])
- self.assertTrue(issued_at < expires_at)
-
- self.assertIn('user', token)
- self.assertIn('id', token['user'])
- self.assertIn('name', token['user'])
- self.assertIn('domain', token['user'])
- self.assertIn('id', token['user']['domain'])
-
- if user is not None:
- self.assertEqual(user['id'], token['user']['id'])
- self.assertEqual(user['name'], token['user']['name'])
- self.assertEqual(user['domain_id'], token['user']['domain']['id'])
-
- return token
-
- def assertValidUnscopedTokenResponse(self, r, *args, **kwargs):
- token = self.assertValidTokenResponse(r, *args, **kwargs)
- validator_object = validators.SchemaValidator(
- self.generate_token_schema()
- )
- validator_object.validate(token)
-
- return token
-
- def assertValidScopedTokenResponse(self, r, *args, **kwargs):
- require_catalog = kwargs.pop('require_catalog', True)
- endpoint_filter = kwargs.pop('endpoint_filter', False)
- ep_filter_assoc = kwargs.pop('ep_filter_assoc', 0)
- is_admin_project = kwargs.pop('is_admin_project', False)
- token = self.assertValidTokenResponse(r, *args, **kwargs)
-
- if require_catalog:
- endpoint_num = 0
- self.assertIn('catalog', token)
-
- if isinstance(token['catalog'], list):
- # only test JSON
- for service in token['catalog']:
- for endpoint in service['endpoints']:
- self.assertNotIn('enabled', endpoint)
- self.assertNotIn('legacy_endpoint_id', endpoint)
- self.assertNotIn('service_id', endpoint)
- endpoint_num += 1
-
- # sub test for the OS-EP-FILTER extension enabled
- if endpoint_filter:
- self.assertEqual(ep_filter_assoc, endpoint_num)
- else:
- self.assertNotIn('catalog', token)
-
- self.assertIn('roles', token)
- self.assertTrue(token['roles'])
- for role in token['roles']:
- self.assertIn('id', role)
- self.assertIn('name', role)
-
- if is_admin_project:
- # NOTE(samueldmq): We want to explicitly test for boolean
- self.assertIs(True, token['is_admin_project'])
- else:
- self.assertNotIn('is_admin_project', token)
-
- return token
-
- def assertValidProjectScopedTokenResponse(self, r, *args, **kwargs):
- token = self.assertValidScopedTokenResponse(r, *args, **kwargs)
-
- project_scoped_token_schema = self.generate_token_schema(
- project_scoped=True)
-
- if token.get('OS-TRUST:trust'):
- trust_properties = {
- 'OS-TRUST:trust': {
- 'type': ['object'],
- 'required': ['id', 'impersonation', 'trustor_user',
- 'trustee_user'],
- 'properties': {
- 'id': {'type': 'string'},
- 'impersonation': {'type': 'boolean'},
- 'trustor_user': {
- 'type': 'object',
- 'required': ['id'],
- 'properties': {
- 'id': {'type': 'string'}
- },
- 'additionalProperties': False
- },
- 'trustee_user': {
- 'type': 'object',
- 'required': ['id'],
- 'properties': {
- 'id': {'type': 'string'}
- },
- 'additionalProperties': False
- }
- },
- 'additionalProperties': False
- }
- }
- project_scoped_token_schema['properties'].update(trust_properties)
-
- validator_object = validators.SchemaValidator(
- project_scoped_token_schema)
- validator_object.validate(token)
-
- self.assertEqual(self.role_id, token['roles'][0]['id'])
-
- return token
-
- def assertValidDomainScopedTokenResponse(self, r, *args, **kwargs):
- token = self.assertValidScopedTokenResponse(r, *args, **kwargs)
-
- validator_object = validators.SchemaValidator(
- self.generate_token_schema(domain_scoped=True)
- )
- validator_object.validate(token)
-
- return token
-
- def assertEqualTokens(self, a, b):
- """Assert that two tokens are equal.
-
- Compare two tokens except for their ids. This also truncates
- the time in the comparison.
- """
- def normalize(token):
- del token['token']['expires_at']
- del token['token']['issued_at']
- return token
-
- a_expires_at = self.assertValidISO8601ExtendedFormatDatetime(
- a['token']['expires_at'])
- b_expires_at = self.assertValidISO8601ExtendedFormatDatetime(
- b['token']['expires_at'])
- self.assertCloseEnoughForGovernmentWork(a_expires_at, b_expires_at)
-
- a_issued_at = self.assertValidISO8601ExtendedFormatDatetime(
- a['token']['issued_at'])
- b_issued_at = self.assertValidISO8601ExtendedFormatDatetime(
- b['token']['issued_at'])
- self.assertCloseEnoughForGovernmentWork(a_issued_at, b_issued_at)
-
- return self.assertDictEqual(normalize(a), normalize(b))
-
- # catalog validation
-
- def assertValidCatalogResponse(self, resp, *args, **kwargs):
- self.assertEqual(set(['catalog', 'links']), set(resp.json.keys()))
- self.assertValidCatalog(resp.json['catalog'])
- self.assertIn('links', resp.json)
- self.assertIsInstance(resp.json['links'], dict)
- self.assertEqual(['self'], list(resp.json['links'].keys()))
- self.assertEqual(
- 'http://localhost/v3/auth/catalog',
- resp.json['links']['self'])
-
- def assertValidCatalog(self, entity):
- self.assertIsInstance(entity, list)
- self.assertTrue(len(entity) > 0)
- for service in entity:
- self.assertIsNotNone(service.get('id'))
- self.assertIsNotNone(service.get('name'))
- self.assertIsNotNone(service.get('type'))
- self.assertNotIn('enabled', service)
- self.assertTrue(len(service['endpoints']) > 0)
- for endpoint in service['endpoints']:
- self.assertIsNotNone(endpoint.get('id'))
- self.assertIsNotNone(endpoint.get('interface'))
- self.assertIsNotNone(endpoint.get('url'))
- self.assertNotIn('enabled', endpoint)
- self.assertNotIn('legacy_endpoint_id', endpoint)
- self.assertNotIn('service_id', endpoint)
-
- # region validation
-
- def assertValidRegionListResponse(self, resp, *args, **kwargs):
- # NOTE(jaypipes): I have to pass in a blank keys_to_check parameter
- # below otherwise the base assertValidEntity method
- # tries to find a "name" and an "enabled" key in the
- # returned ref dicts. The issue is, I don't understand
- # how the service and endpoint entity assertions below
- # actually work (they don't raise assertions), since
- # AFAICT, the service and endpoint tables don't have
- # a "name" column either... :(
- return self.assertValidListResponse(
- resp,
- 'regions',
- self.assertValidRegion,
- keys_to_check=[],
- *args,
- **kwargs)
-
- def assertValidRegionResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'region',
- self.assertValidRegion,
- keys_to_check=[],
- *args,
- **kwargs)
-
- def assertValidRegion(self, entity, ref=None):
- self.assertIsNotNone(entity.get('description'))
- if ref:
- self.assertEqual(ref['description'], entity['description'])
- return entity
-
- # service validation
-
- def assertValidServiceListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'services',
- self.assertValidService,
- *args,
- **kwargs)
-
- def assertValidServiceResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'service',
- self.assertValidService,
- *args,
- **kwargs)
-
- def assertValidService(self, entity, ref=None):
- self.assertIsNotNone(entity.get('type'))
- self.assertIsInstance(entity.get('enabled'), bool)
- if ref:
- self.assertEqual(ref['type'], entity['type'])
- return entity
-
- # endpoint validation
-
- def assertValidEndpointListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'endpoints',
- self.assertValidEndpoint,
- *args,
- **kwargs)
-
- def assertValidEndpointResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'endpoint',
- self.assertValidEndpoint,
- *args,
- **kwargs)
-
- def assertValidEndpoint(self, entity, ref=None):
- self.assertIsNotNone(entity.get('interface'))
- self.assertIsNotNone(entity.get('service_id'))
- self.assertIsInstance(entity['enabled'], bool)
-
- # this is intended to be an unexposed implementation detail
- self.assertNotIn('legacy_endpoint_id', entity)
-
- if ref:
- self.assertEqual(ref['interface'], entity['interface'])
- self.assertEqual(ref['service_id'], entity['service_id'])
- if ref.get('region') is not None:
- self.assertEqual(ref['region_id'], entity.get('region_id'))
-
- return entity
-
- # domain validation
-
- def assertValidDomainListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'domains',
- self.assertValidDomain,
- *args,
- **kwargs)
-
- def assertValidDomainResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'domain',
- self.assertValidDomain,
- *args,
- **kwargs)
-
- def assertValidDomain(self, entity, ref=None):
- if ref:
- pass
- return entity
-
- # project validation
-
- def assertValidProjectListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'projects',
- self.assertValidProject,
- *args,
- **kwargs)
-
- def assertValidProjectResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'project',
- self.assertValidProject,
- *args,
- **kwargs)
-
- def assertValidProject(self, entity, ref=None):
- if ref:
- self.assertEqual(ref['domain_id'], entity['domain_id'])
- return entity
-
- # user validation
-
- def assertValidUserListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'users',
- self.assertValidUser,
- keys_to_check=['name', 'enabled'],
- *args,
- **kwargs)
-
- def assertValidUserResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'user',
- self.assertValidUser,
- keys_to_check=['name', 'enabled'],
- *args,
- **kwargs)
-
- def assertValidUser(self, entity, ref=None):
- self.assertIsNotNone(entity.get('domain_id'))
- self.assertIsNotNone(entity.get('email'))
- self.assertIsNone(entity.get('password'))
- self.assertNotIn('tenantId', entity)
- if ref:
- self.assertEqual(ref['domain_id'], entity['domain_id'])
- self.assertEqual(ref['email'], entity['email'])
- if 'default_project_id' in ref:
- self.assertIsNotNone(ref['default_project_id'])
- self.assertEqual(ref['default_project_id'],
- entity['default_project_id'])
- return entity
-
- # group validation
-
- def assertValidGroupListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'groups',
- self.assertValidGroup,
- keys_to_check=['name', 'description', 'domain_id'],
- *args,
- **kwargs)
-
- def assertValidGroupResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'group',
- self.assertValidGroup,
- keys_to_check=['name', 'description', 'domain_id'],
- *args,
- **kwargs)
-
- def assertValidGroup(self, entity, ref=None):
- self.assertIsNotNone(entity.get('name'))
- if ref:
- self.assertEqual(ref['name'], entity['name'])
- return entity
-
- # credential validation
-
- def assertValidCredentialListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'credentials',
- self.assertValidCredential,
- keys_to_check=['blob', 'user_id', 'type'],
- *args,
- **kwargs)
-
- def assertValidCredentialResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'credential',
- self.assertValidCredential,
- keys_to_check=['blob', 'user_id', 'type'],
- *args,
- **kwargs)
-
- def assertValidCredential(self, entity, ref=None):
- self.assertIsNotNone(entity.get('user_id'))
- self.assertIsNotNone(entity.get('blob'))
- self.assertIsNotNone(entity.get('type'))
- if ref:
- self.assertEqual(ref['user_id'], entity['user_id'])
- self.assertEqual(ref['blob'], entity['blob'])
- self.assertEqual(ref['type'], entity['type'])
- self.assertEqual(ref.get('project_id'), entity.get('project_id'))
- return entity
-
- # role validation
-
- def assertValidRoleListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'roles',
- self.assertValidRole,
- keys_to_check=['name'],
- *args,
- **kwargs)
-
- def assertRoleInListResponse(self, resp, ref, expected=1):
- found_count = 0
- for entity in resp.result.get('roles'):
- try:
- self.assertValidRole(entity, ref=ref)
- except Exception:
- # It doesn't match, so let's go onto the next one
- pass
- else:
- found_count += 1
- self.assertEqual(expected, found_count)
-
- def assertRoleNotInListResponse(self, resp, ref):
- self.assertRoleInListResponse(resp, ref=ref, expected=0)
-
- def assertValidRoleResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'role',
- self.assertValidRole,
- keys_to_check=['name'],
- *args,
- **kwargs)
-
- def assertValidRole(self, entity, ref=None):
- self.assertIsNotNone(entity.get('name'))
- if ref:
- self.assertEqual(ref['name'], entity['name'])
- self.assertEqual(ref['domain_id'], entity['domain_id'])
- return entity
-
- # role assignment validation
-
- def assertValidRoleAssignmentListResponse(self, resp, expected_length=None,
- resource_url=None):
- entities = resp.result.get('role_assignments')
-
- if expected_length:
- self.assertEqual(expected_length, len(entities))
-
- # Collections should have relational links
- self.assertValidListLinks(resp.result.get('links'),
- resource_url=resource_url)
-
- for entity in entities:
- self.assertIsNotNone(entity)
- self.assertValidRoleAssignment(entity)
- return entities
-
- def assertValidRoleAssignment(self, entity, ref=None):
- # A role should be present
- self.assertIsNotNone(entity.get('role'))
- self.assertIsNotNone(entity['role'].get('id'))
-
- # Only one of user or group should be present
- if entity.get('user'):
- self.assertIsNone(entity.get('group'))
- self.assertIsNotNone(entity['user'].get('id'))
- else:
- self.assertIsNotNone(entity.get('group'))
- self.assertIsNotNone(entity['group'].get('id'))
-
- # A scope should be present and have only one of domain or project
- self.assertIsNotNone(entity.get('scope'))
-
- if entity['scope'].get('project'):
- self.assertIsNone(entity['scope'].get('domain'))
- self.assertIsNotNone(entity['scope']['project'].get('id'))
- else:
- self.assertIsNotNone(entity['scope'].get('domain'))
- self.assertIsNotNone(entity['scope']['domain'].get('id'))
-
- # An assignment link should be present
- self.assertIsNotNone(entity.get('links'))
- self.assertIsNotNone(entity['links'].get('assignment'))
-
- if ref:
- links = ref.pop('links')
- try:
- self.assertDictContainsSubset(ref, entity)
- self.assertIn(links['assignment'],
- entity['links']['assignment'])
- finally:
- if links:
- ref['links'] = links
-
- def assertRoleAssignmentInListResponse(self, resp, ref, expected=1):
-
- found_count = 0
- for entity in resp.result.get('role_assignments'):
- try:
- self.assertValidRoleAssignment(entity, ref=ref)
- except Exception:
- # It doesn't match, so let's go onto the next one
- pass
- else:
- found_count += 1
- self.assertEqual(expected, found_count)
-
- def assertRoleAssignmentNotInListResponse(self, resp, ref):
- self.assertRoleAssignmentInListResponse(resp, ref=ref, expected=0)
-
- # policy validation
-
- def assertValidPolicyListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'policies',
- self.assertValidPolicy,
- *args,
- **kwargs)
-
- def assertValidPolicyResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'policy',
- self.assertValidPolicy,
- *args,
- **kwargs)
-
- def assertValidPolicy(self, entity, ref=None):
- self.assertIsNotNone(entity.get('blob'))
- self.assertIsNotNone(entity.get('type'))
- if ref:
- self.assertEqual(ref['blob'], entity['blob'])
- self.assertEqual(ref['type'], entity['type'])
- return entity
-
- # trust validation
-
- def assertValidTrustListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'trusts',
- self.assertValidTrustSummary,
- keys_to_check=['trustor_user_id',
- 'trustee_user_id',
- 'impersonation'],
- *args,
- **kwargs)
-
- def assertValidTrustResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'trust',
- self.assertValidTrust,
- keys_to_check=['trustor_user_id',
- 'trustee_user_id',
- 'impersonation'],
- *args,
- **kwargs)
-
- def assertValidTrustSummary(self, entity, ref=None):
- return self.assertValidTrust(entity, ref, summary=True)
-
- def assertValidTrust(self, entity, ref=None, summary=False):
- self.assertIsNotNone(entity.get('trustor_user_id'))
- self.assertIsNotNone(entity.get('trustee_user_id'))
- self.assertIsNotNone(entity.get('impersonation'))
-
- self.assertIn('expires_at', entity)
- if entity['expires_at'] is not None:
- self.assertValidISO8601ExtendedFormatDatetime(entity['expires_at'])
-
- if summary:
- # Trust list contains no roles, but getting a specific
- # trust by ID provides the detailed response containing roles
- self.assertNotIn('roles', entity)
- self.assertIn('project_id', entity)
- else:
- for role in entity['roles']:
- self.assertIsNotNone(role)
- self.assertValidEntity(role, keys_to_check=['name'])
- self.assertValidRole(role)
-
- self.assertValidListLinks(entity.get('roles_links'))
-
- # always disallow role xor project_id (neither or both is allowed)
- has_roles = bool(entity.get('roles'))
- has_project = bool(entity.get('project_id'))
- self.assertFalse(has_roles ^ has_project)
-
- if ref:
- self.assertEqual(ref['trustor_user_id'], entity['trustor_user_id'])
- self.assertEqual(ref['trustee_user_id'], entity['trustee_user_id'])
- self.assertEqual(ref['project_id'], entity['project_id'])
- if entity.get('expires_at') or ref.get('expires_at'):
- entity_exp = self.assertValidISO8601ExtendedFormatDatetime(
- entity['expires_at'])
- ref_exp = self.assertValidISO8601ExtendedFormatDatetime(
- ref['expires_at'])
- self.assertCloseEnoughForGovernmentWork(entity_exp, ref_exp)
- else:
- self.assertEqual(ref.get('expires_at'),
- entity.get('expires_at'))
-
- return entity
-
- # Service providers (federation)
-
- def assertValidServiceProvider(self, entity, ref=None, *args, **kwargs):
-
- attributes = frozenset(['auth_url', 'id', 'enabled', 'description',
- 'links', 'relay_state_prefix', 'sp_url'])
- for attribute in attributes:
- self.assertIsNotNone(entity.get(attribute))
-
- def assertValidServiceProviderListResponse(self, resp, *args, **kwargs):
- if kwargs.get('keys_to_check') is None:
- kwargs['keys_to_check'] = ['auth_url', 'id', 'enabled',
- 'description', 'relay_state_prefix',
- 'sp_url']
- return self.assertValidListResponse(
- resp,
- 'service_providers',
- self.assertValidServiceProvider,
- *args,
- **kwargs)
-
- def build_external_auth_request(self, remote_user,
- remote_domain=None, auth_data=None,
- kerberos=False):
- context = {'environment': {'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'}}
- if remote_domain:
- context['environment']['REMOTE_DOMAIN'] = remote_domain
- if not auth_data:
- auth_data = self.build_authentication_request(
- kerberos=kerberos)['auth']
- no_context = None
- auth_info = auth.controllers.AuthInfo.create(no_context, auth_data)
- auth_context = {'extras': {}, 'method_names': []}
- return context, auth_info, auth_context
-
-
-class VersionTestCase(RestfulTestCase):
- def test_get_version(self):
- pass
-
-
-# NOTE(morganfainberg): To be removed when admin_token_auth is removed. This
-# has been split out to allow testing admin_token auth without enabling it
-# for other tests.
-class AuthContextMiddlewareAdminTokenTestCase(RestfulTestCase):
- EXTENSION_TO_ADD = 'admin_token_auth'
-
- def config_overrides(self):
- super(AuthContextMiddlewareAdminTokenTestCase, self).config_overrides()
- self.config_fixture.config(
- admin_token='ADMIN')
-
- # NOTE(morganfainberg): This is knowingly copied from below for simplicity
- # during the deprecation cycle.
- def _middleware_request(self, token, extra_environ=None):
-
- def application(environ, start_response):
- body = b'body'
- headers = [('Content-Type', 'text/html; charset=utf8'),
- ('Content-Length', str(len(body)))]
- start_response('200 OK', headers)
- return [body]
-
- app = webtest.TestApp(middleware.AuthContextMiddleware(application),
- extra_environ=extra_environ)
- resp = app.get('/', headers={middleware.AUTH_TOKEN_HEADER: token})
- self.assertEqual('body', resp.text) # just to make sure it worked
- return resp.request
-
- def test_admin_auth_context(self):
- # test to make sure AuthContextMiddleware does not attempt to build the
- # auth context if the admin_token middleware indicates it's admin
- # already.
- token_id = uuid.uuid4().hex # token doesn't matter.
- # the admin_token middleware sets is_admin in the context.
- extra_environ = {middleware.CONTEXT_ENV: {'is_admin': True}}
- req = self._middleware_request(token_id, extra_environ)
- auth_context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self.assertDictEqual({}, auth_context)
-
- @mock.patch.object(middleware_auth.versionutils,
- 'report_deprecated_feature')
- def test_admin_token_auth_context_deprecated(self, mock_report_deprecated):
- # For backwards compatibility AuthContextMiddleware will check that the
- # admin token (as configured in the CONF file) is present and not
- # attempt to build the auth context. This is deprecated.
- req = self._middleware_request('ADMIN')
- auth_context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
- self.assertDictEqual({}, auth_context)
- self.assertEqual(1, mock_report_deprecated.call_count)
-
-
-# NOTE(gyee): test AuthContextMiddleware here instead of test_middleware.py
-# because we need the token
-class AuthContextMiddlewareTestCase(RestfulTestCase):
-
- def _middleware_request(self, token, extra_environ=None):
-
- def application(environ, start_response):
- body = b'body'
- headers = [('Content-Type', 'text/html; charset=utf8'),
- ('Content-Length', str(len(body)))]
- start_response('200 OK', headers)
- return [body]
-
- app = webtest.TestApp(middleware.AuthContextMiddleware(application),
- extra_environ=extra_environ)
- resp = app.get('/', headers={middleware.AUTH_TOKEN_HEADER: token})
- self.assertEqual(b'body', resp.body) # just to make sure it worked
- return resp.request
-
- def test_auth_context_build_by_middleware(self):
- # test to make sure AuthContextMiddleware successful build the auth
- # context from the incoming auth token
- admin_token = self.get_scoped_token()
- req = self._middleware_request(admin_token)
- self.assertEqual(
- self.user['id'],
- req.environ.get(authorization.AUTH_CONTEXT_ENV)['user_id'])
-
- def test_auth_context_override(self):
- overridden_context = 'OVERRIDDEN_CONTEXT'
- # this token should not be used
- token = uuid.uuid4().hex
-
- extra_environ = {authorization.AUTH_CONTEXT_ENV: overridden_context}
- req = self._middleware_request(token, extra_environ=extra_environ)
- # make sure overridden context take precedence
- self.assertEqual(overridden_context,
- req.environ.get(authorization.AUTH_CONTEXT_ENV))
-
- def test_unscoped_token_auth_context(self):
- unscoped_token = self.get_unscoped_token()
- req = self._middleware_request(unscoped_token)
- for key in ['project_id', 'domain_id', 'domain_name']:
- self.assertNotIn(
- key,
- req.environ.get(authorization.AUTH_CONTEXT_ENV))
-
- def test_project_scoped_token_auth_context(self):
- project_scoped_token = self.get_scoped_token()
- req = self._middleware_request(project_scoped_token)
- self.assertEqual(
- self.project['id'],
- req.environ.get(authorization.AUTH_CONTEXT_ENV)['project_id'])
-
- def test_domain_scoped_token_auth_context(self):
- # grant the domain role to user
- path = '/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id'])
- self.put(path=path)
-
- domain_scoped_token = self.get_domain_scoped_token()
- req = self._middleware_request(domain_scoped_token)
- self.assertEqual(
- self.domain['id'],
- req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_id'])
- self.assertEqual(
- self.domain['name'],
- req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_name'])
-
- def test_oslo_context(self):
- # After AuthContextMiddleware runs, an
- # oslo_context.context.RequestContext was created so that its fields
- # can be logged. This test validates that the RequestContext was
- # created and the fields are set as expected.
-
- # Use a scoped token so more fields can be set.
- token = self.get_scoped_token()
-
- # oslo_middleware RequestId middleware sets openstack.request_id.
- request_id = uuid.uuid4().hex
- environ = {'openstack.request_id': request_id}
- self._middleware_request(token, extra_environ=environ)
-
- req_context = oslo_context.context.get_current()
- self.assertEqual(request_id, req_context.request_id)
- self.assertEqual(token, req_context.auth_token)
- self.assertEqual(self.user['id'], req_context.user)
- self.assertEqual(self.project['id'], req_context.tenant)
- self.assertIsNone(req_context.domain)
- self.assertEqual(self.user['domain_id'], req_context.user_domain)
- self.assertEqual(self.project['domain_id'], req_context.project_domain)
- self.assertFalse(req_context.is_admin)
-
-
-class JsonHomeTestMixin(object):
- """JSON Home test
-
- Mixin this class to provide a test for the JSON-Home response for an
- extension.
-
- The base class must set JSON_HOME_DATA to a dict of relationship URLs
- (rels) to the JSON-Home data for the relationship. The rels and associated
- data must be in the response.
-
- """
-
- def test_get_json_home(self):
- resp = self.get('/', convert=False,
- headers={'Accept': 'application/json-home'})
- self.assertThat(resp.headers['Content-Type'],
- matchers.Equals('application/json-home'))
- resp_data = jsonutils.loads(resp.body)
-
- # Check that the example relationships are present.
- for rel in self.JSON_HOME_DATA:
- self.assertThat(resp_data['resources'][rel],
- matchers.Equals(self.JSON_HOME_DATA[rel]))
-
-
-class AssignmentTestMixin(object):
- """To hold assignment helper functions."""
-
- def build_role_assignment_query_url(self, effective=False, **filters):
- """Build and return a role assignment query url with provided params.
-
- Available filters are: domain_id, project_id, user_id, group_id,
- role_id and inherited_to_projects.
- """
- query_params = '?effective' if effective else ''
-
- for k, v in filters.items():
- query_params += '?' if not query_params else '&'
-
- if k == 'inherited_to_projects':
- query_params += 'scope.OS-INHERIT:inherited_to=projects'
- else:
- if k in ['domain_id', 'project_id']:
- query_params += 'scope.'
- elif k not in ['user_id', 'group_id', 'role_id']:
- raise ValueError(
- 'Invalid key \'%s\' in provided filters.' % k)
-
- query_params += '%s=%s' % (k.replace('_', '.'), v)
-
- return '/role_assignments%s' % query_params
-
- def build_role_assignment_link(self, **attribs):
- """Build and return a role assignment link with provided attributes.
-
- Provided attributes are expected to contain: domain_id or project_id,
- user_id or group_id, role_id and, optionally, inherited_to_projects.
- """
- if attribs.get('domain_id'):
- link = '/domains/' + attribs['domain_id']
- else:
- link = '/projects/' + attribs['project_id']
-
- if attribs.get('user_id'):
- link += '/users/' + attribs['user_id']
- else:
- link += '/groups/' + attribs['group_id']
-
- link += '/roles/' + attribs['role_id']
-
- if attribs.get('inherited_to_projects'):
- return '/OS-INHERIT%s/inherited_to_projects' % link
-
- return link
-
- def build_role_assignment_entity(
- self, link=None, prior_role_link=None, **attribs):
- """Build and return a role assignment entity with provided attributes.
-
- Provided attributes are expected to contain: domain_id or project_id,
- user_id or group_id, role_id and, optionally, inherited_to_projects.
- """
- entity = {'links': {'assignment': (
- link or self.build_role_assignment_link(**attribs))}}
-
- if attribs.get('domain_id'):
- entity['scope'] = {'domain': {'id': attribs['domain_id']}}
- else:
- entity['scope'] = {'project': {'id': attribs['project_id']}}
-
- if attribs.get('user_id'):
- entity['user'] = {'id': attribs['user_id']}
-
- if attribs.get('group_id'):
- entity['links']['membership'] = ('/groups/%s/users/%s' %
- (attribs['group_id'],
- attribs['user_id']))
- else:
- entity['group'] = {'id': attribs['group_id']}
-
- entity['role'] = {'id': attribs['role_id']}
-
- if attribs.get('inherited_to_projects'):
- entity['scope']['OS-INHERIT:inherited_to'] = 'projects'
-
- if prior_role_link:
- entity['links']['prior_role'] = prior_role_link
-
- return entity
-
- def build_role_assignment_entity_include_names(self,
- domain_ref=None,
- role_ref=None,
- group_ref=None,
- user_ref=None,
- project_ref=None,
- inherited_assignment=None):
- """Build and return a role assignment entity with provided attributes.
-
- The expected attributes are: domain_ref or project_ref,
- user_ref or group_ref, role_ref and, optionally, inherited_to_projects.
- """
- entity = {'links': {}}
- attributes_for_links = {}
- if project_ref:
- dmn_name = self.resource_api.get_domain(
- project_ref['domain_id'])['name']
-
- entity['scope'] = {'project': {
- 'id': project_ref['id'],
- 'name': project_ref['name'],
- 'domain': {
- 'id': project_ref['domain_id'],
- 'name': dmn_name}}}
- attributes_for_links['project_id'] = project_ref['id']
- else:
- entity['scope'] = {'domain': {'id': domain_ref['id'],
- 'name': domain_ref['name']}}
- attributes_for_links['domain_id'] = domain_ref['id']
- if user_ref:
- dmn_name = self.resource_api.get_domain(
- user_ref['domain_id'])['name']
- entity['user'] = {'id': user_ref['id'],
- 'name': user_ref['name'],
- 'domain': {'id': user_ref['domain_id'],
- 'name': dmn_name}}
- attributes_for_links['user_id'] = user_ref['id']
- else:
- dmn_name = self.resource_api.get_domain(
- group_ref['domain_id'])['name']
- entity['group'] = {'id': group_ref['id'],
- 'name': group_ref['name'],
- 'domain': {
- 'id': group_ref['domain_id'],
- 'name': dmn_name}}
- attributes_for_links['group_id'] = group_ref['id']
-
- if role_ref:
- entity['role'] = {'id': role_ref['id'],
- 'name': role_ref['name']}
- attributes_for_links['role_id'] = role_ref['id']
-
- if inherited_assignment:
- entity['scope']['OS-INHERIT:inherited_to'] = 'projects'
- attributes_for_links['inherited_to_projects'] = True
-
- entity['links']['assignment'] = self.build_role_assignment_link(
- **attributes_for_links)
-
- return entity
diff --git a/keystone-moon/keystone/tests/unit/test_v3_assignment.py b/keystone-moon/keystone/tests/unit/test_v3_assignment.py
deleted file mode 100644
index 86fb9f74..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_assignment.py
+++ /dev/null
@@ -1,2871 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import random
-import uuid
-
-from oslo_config import cfg
-from six.moves import http_client
-from six.moves import range
-from testtools import matchers
-
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-
-
-CONF = cfg.CONF
-
-
-class AssignmentTestCase(test_v3.RestfulTestCase,
- test_v3.AssignmentTestMixin):
- """Test roles and role assignments."""
-
- def setUp(self):
- super(AssignmentTestCase, self).setUp()
-
- self.group = unit.new_group_ref(domain_id=self.domain_id)
- self.group = self.identity_api.create_group(self.group)
- self.group_id = self.group['id']
-
- # Role CRUD tests
-
- def test_create_role(self):
- """Call ``POST /roles``."""
- ref = unit.new_role_ref()
- r = self.post(
- '/roles',
- body={'role': ref})
- return self.assertValidRoleResponse(r, ref)
-
- def test_create_role_bad_request(self):
- """Call ``POST /roles``."""
- self.post('/roles', body={'role': {}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_list_roles(self):
- """Call ``GET /roles``."""
- resource_url = '/roles'
- r = self.get(resource_url)
- self.assertValidRoleListResponse(r, ref=self.role,
- resource_url=resource_url)
-
- def test_get_role(self):
- """Call ``GET /roles/{role_id}``."""
- r = self.get('/roles/%(role_id)s' % {
- 'role_id': self.role_id})
- self.assertValidRoleResponse(r, self.role)
-
- def test_update_role(self):
- """Call ``PATCH /roles/{role_id}``."""
- ref = unit.new_role_ref()
- del ref['id']
- r = self.patch('/roles/%(role_id)s' % {
- 'role_id': self.role_id},
- body={'role': ref})
- self.assertValidRoleResponse(r, ref)
-
- def test_delete_role(self):
- """Call ``DELETE /roles/{role_id}``."""
- self.delete('/roles/%(role_id)s' % {
- 'role_id': self.role_id})
-
- def test_create_member_role(self):
- """Call ``POST /roles``."""
- # specify only the name on creation
- ref = unit.new_role_ref(name=CONF.member_role_name)
- r = self.post(
- '/roles',
- body={'role': ref})
- self.assertValidRoleResponse(r, ref)
-
- # but the ID should be set as defined in CONF
- self.assertEqual(CONF.member_role_id, r.json['role']['id'])
-
- # Role Grants tests
-
- def test_crud_user_project_role_grants(self):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
-
- collection_url = (
- '/projects/%(project_id)s/users/%(user_id)s/roles' % {
- 'project_id': self.project['id'],
- 'user_id': self.user['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': role['id']}
-
- # There is a role assignment for self.user on self.project
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=self.role,
- expected_length=1)
-
- self.put(member_url)
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=role,
- resource_url=collection_url,
- expected_length=2)
-
- self.delete(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=self.role, expected_length=1)
- self.assertIn(collection_url, r.result['links']['self'])
-
- def test_crud_user_project_role_grants_no_user(self):
- """Grant role on a project to a user that doesn't exist.
-
- When grant a role on a project to a user that doesn't exist, the server
- returns Not Found for the user.
-
- """
- user_id = uuid.uuid4().hex
-
- collection_url = (
- '/projects/%(project_id)s/users/%(user_id)s/roles' % {
- 'project_id': self.project['id'], 'user_id': user_id})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- self.put(member_url, expected_status=http_client.NOT_FOUND)
-
- def test_crud_user_domain_role_grants(self):
- collection_url = (
- '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': self.domain_id,
- 'user_id': self.user['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- self.put(member_url)
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=self.role,
- resource_url=collection_url)
-
- self.delete(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, expected_length=0,
- resource_url=collection_url)
-
- def test_crud_user_domain_role_grants_no_user(self):
- """Grant role on a domain to a user that doesn't exist.
-
- When grant a role on a domain to a user that doesn't exist, the server
- returns 404 Not Found for the user.
-
- """
- user_id = uuid.uuid4().hex
-
- collection_url = (
- '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': self.domain_id, 'user_id': user_id})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- self.put(member_url, expected_status=http_client.NOT_FOUND)
-
- def test_crud_group_project_role_grants(self):
- collection_url = (
- '/projects/%(project_id)s/groups/%(group_id)s/roles' % {
- 'project_id': self.project_id,
- 'group_id': self.group_id})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- self.put(member_url)
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=self.role,
- resource_url=collection_url)
-
- self.delete(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, expected_length=0,
- resource_url=collection_url)
-
- def test_crud_group_project_role_grants_no_group(self):
- """Grant role on a project to a group that doesn't exist.
-
- When grant a role on a project to a group that doesn't exist, the
- server returns 404 Not Found for the group.
-
- """
- group_id = uuid.uuid4().hex
-
- collection_url = (
- '/projects/%(project_id)s/groups/%(group_id)s/roles' % {
- 'project_id': self.project_id,
- 'group_id': group_id})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- self.put(member_url, expected_status=http_client.NOT_FOUND)
-
- def test_crud_group_domain_role_grants(self):
- collection_url = (
- '/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
- 'domain_id': self.domain_id,
- 'group_id': self.group_id})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- self.put(member_url)
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=self.role,
- resource_url=collection_url)
-
- self.delete(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, expected_length=0,
- resource_url=collection_url)
-
- def test_crud_group_domain_role_grants_no_group(self):
- """Grant role on a domain to a group that doesn't exist.
-
- When grant a role on a domain to a group that doesn't exist, the server
- returns 404 Not Found for the group.
-
- """
- group_id = uuid.uuid4().hex
-
- collection_url = (
- '/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
- 'domain_id': self.domain_id,
- 'group_id': group_id})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- self.put(member_url, expected_status=http_client.NOT_FOUND)
-
- def _create_new_user_and_assign_role_on_project(self):
- """Create a new user and assign user a role on a project."""
- # Create a new user
- new_user = unit.new_user_ref(domain_id=self.domain_id)
- user_ref = self.identity_api.create_user(new_user)
- # Assign the user a role on the project
- collection_url = (
- '/projects/%(project_id)s/users/%(user_id)s/roles' % {
- 'project_id': self.project_id,
- 'user_id': user_ref['id']})
- member_url = ('%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id})
- self.put(member_url)
- # Check the user has the role assigned
- self.head(member_url)
- return member_url, user_ref
-
- def test_delete_user_before_removing_role_assignment_succeeds(self):
- """Call ``DELETE`` on the user before the role assignment."""
- member_url, user = self._create_new_user_and_assign_role_on_project()
- # Delete the user from identity backend
- self.identity_api.driver.delete_user(user['id'])
- # Clean up the role assignment
- self.delete(member_url)
- # Make sure the role is gone
- self.head(member_url, expected_status=http_client.NOT_FOUND)
-
- def test_delete_user_and_check_role_assignment_fails(self):
- """Call ``DELETE`` on the user and check the role assignment."""
- member_url, user = self._create_new_user_and_assign_role_on_project()
- # Delete the user from identity backend
- self.identity_api.delete_user(user['id'])
- # We should get a 404 Not Found when looking for the user in the
- # identity backend because we're not performing a delete operation on
- # the role.
- self.head(member_url, expected_status=http_client.NOT_FOUND)
-
- def test_token_revoked_once_group_role_grant_revoked(self):
- """Test token is revoked when group role grant is revoked
-
- When a role granted to a group is revoked for a given scope,
- all tokens related to this scope and belonging to one of the members
- of this group should be revoked.
-
- The revocation should be independently to the presence
- of the revoke API.
- """
- # creates grant from group on project.
- self.assignment_api.create_grant(role_id=self.role['id'],
- project_id=self.project['id'],
- group_id=self.group['id'])
-
- # adds user to the group.
- self.identity_api.add_user_to_group(user_id=self.user['id'],
- group_id=self.group['id'])
-
- # creates a token for the user
- auth_body = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- token_resp = self.post('/auth/tokens', body=auth_body)
- token = token_resp.headers.get('x-subject-token')
-
- # validates the returned token; it should be valid.
- self.head('/auth/tokens',
- headers={'x-subject-token': token},
- expected_status=http_client.OK)
-
- # revokes the grant from group on project.
- self.assignment_api.delete_grant(role_id=self.role['id'],
- project_id=self.project['id'],
- group_id=self.group['id'])
-
- # validates the same token again; it should not longer be valid.
- self.head('/auth/tokens',
- headers={'x-subject-token': token},
- expected_status=http_client.NOT_FOUND)
-
- @unit.skip_if_cache_disabled('assignment')
- def test_delete_grant_from_user_and_project_invalidate_cache(self):
- # create a new project
- new_project = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(new_project['id'], new_project)
-
- collection_url = (
- '/projects/%(project_id)s/users/%(user_id)s/roles' % {
- 'project_id': new_project['id'],
- 'user_id': self.user['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- # create the user a grant on the new project
- self.put(member_url)
-
- # check the grant that was just created
- self.head(member_url)
- resp = self.get(collection_url)
- self.assertValidRoleListResponse(resp, ref=self.role,
- resource_url=collection_url)
-
- # delete the grant
- self.delete(member_url)
-
- # get the collection and ensure there are no roles on the project
- resp = self.get(collection_url)
- self.assertListEqual(resp.json_body['roles'], [])
-
- @unit.skip_if_cache_disabled('assignment')
- def test_delete_grant_from_user_and_domain_invalidates_cache(self):
- # create a new domain
- new_domain = unit.new_domain_ref()
- self.resource_api.create_domain(new_domain['id'], new_domain)
-
- collection_url = (
- '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': new_domain['id'],
- 'user_id': self.user['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- # create the user a grant on the new domain
- self.put(member_url)
-
- # check the grant that was just created
- self.head(member_url)
- resp = self.get(collection_url)
- self.assertValidRoleListResponse(resp, ref=self.role,
- resource_url=collection_url)
-
- # delete the grant
- self.delete(member_url)
-
- # get the collection and ensure there are no roles on the domain
- resp = self.get(collection_url)
- self.assertListEqual(resp.json_body['roles'], [])
-
- @unit.skip_if_cache_disabled('assignment')
- def test_delete_grant_from_group_and_project_invalidates_cache(self):
- # create a new project
- new_project = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(new_project['id'], new_project)
-
- collection_url = (
- '/projects/%(project_id)s/groups/%(group_id)s/roles' % {
- 'project_id': new_project['id'],
- 'group_id': self.group['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- # create the group a grant on the new project
- self.put(member_url)
-
- # check the grant that was just created
- self.head(member_url)
- resp = self.get(collection_url)
- self.assertValidRoleListResponse(resp, ref=self.role,
- resource_url=collection_url)
-
- # delete the grant
- self.delete(member_url)
-
- # get the collection and ensure there are no roles on the project
- resp = self.get(collection_url)
- self.assertListEqual(resp.json_body['roles'], [])
-
- @unit.skip_if_cache_disabled('assignment')
- def test_delete_grant_from_group_and_domain_invalidates_cache(self):
- # create a new domain
- new_domain = unit.new_domain_ref()
- self.resource_api.create_domain(new_domain['id'], new_domain)
-
- collection_url = (
- '/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
- 'domain_id': new_domain['id'],
- 'group_id': self.group['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
-
- # create the group a grant on the new domain
- self.put(member_url)
-
- # check the grant that was just created
- self.head(member_url)
- resp = self.get(collection_url)
- self.assertValidRoleListResponse(resp, ref=self.role,
- resource_url=collection_url)
-
- # delete the grant
- self.delete(member_url)
-
- # get the collection and ensure there are no roles on the domain
- resp = self.get(collection_url)
- self.assertListEqual(resp.json_body['roles'], [])
-
- # Role Assignments tests
-
- def test_get_role_assignments(self):
- """Call ``GET /role_assignments``.
-
- The sample data set up already has a user, group and project
- that is part of self.domain. We use these plus a new user
- we create as our data set, making sure we ignore any
- role assignments that are already in existence.
-
- Since we don't yet support a first class entity for role
- assignments, we are only testing the LIST API. To create
- and delete the role assignments we use the old grant APIs.
-
- Test Plan:
-
- - Create extra user for tests
- - Get a list of all existing role assignments
- - Add a new assignment for each of the four combinations, i.e.
- group+domain, user+domain, group+project, user+project, using
- the same role each time
- - Get a new list of all role assignments, checking these four new
- ones have been added
- - Then delete the four we added
- - Get a new list of all role assignments, checking the four have
- been removed
-
- """
- # Since the default fixtures already assign some roles to the
- # user it creates, we also need a new user that will not have any
- # existing assignments
- user1 = unit.new_user_ref(domain_id=self.domain['id'])
- user1 = self.identity_api.create_user(user1)
-
- collection_url = '/role_assignments'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- resource_url=collection_url)
- existing_assignments = len(r.result.get('role_assignments'))
-
- # Now add one of each of the four types of assignment, making sure
- # that we get them all back.
- gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
- group_id=self.group_id,
- role_id=self.role_id)
- self.put(gd_entity['links']['assignment'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 1,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, gd_entity)
-
- ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
- user_id=user1['id'],
- role_id=self.role_id)
- self.put(ud_entity['links']['assignment'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 2,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, ud_entity)
-
- gp_entity = self.build_role_assignment_entity(
- project_id=self.project_id, group_id=self.group_id,
- role_id=self.role_id)
- self.put(gp_entity['links']['assignment'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 3,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, gp_entity)
-
- up_entity = self.build_role_assignment_entity(
- project_id=self.project_id, user_id=user1['id'],
- role_id=self.role_id)
- self.put(up_entity['links']['assignment'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 4,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, up_entity)
-
- # Now delete the four we added and make sure they are removed
- # from the collection.
-
- self.delete(gd_entity['links']['assignment'])
- self.delete(ud_entity['links']['assignment'])
- self.delete(gp_entity['links']['assignment'])
- self.delete(up_entity['links']['assignment'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments,
- resource_url=collection_url)
- self.assertRoleAssignmentNotInListResponse(r, gd_entity)
- self.assertRoleAssignmentNotInListResponse(r, ud_entity)
- self.assertRoleAssignmentNotInListResponse(r, gp_entity)
- self.assertRoleAssignmentNotInListResponse(r, up_entity)
-
- def test_get_effective_role_assignments(self):
- """Call ``GET /role_assignments?effective``.
-
- Test Plan:
-
- - Create two extra user for tests
- - Add these users to a group
- - Add a role assignment for the group on a domain
- - Get a list of all role assignments, checking one has been added
- - Then get a list of all effective role assignments - the group
- assignment should have turned into assignments on the domain
- for each of the group members.
-
- """
- user1 = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
- user2 = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
-
- self.identity_api.add_user_to_group(user1['id'], self.group['id'])
- self.identity_api.add_user_to_group(user2['id'], self.group['id'])
-
- collection_url = '/role_assignments'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- resource_url=collection_url)
- existing_assignments = len(r.result.get('role_assignments'))
-
- gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
- group_id=self.group_id,
- role_id=self.role_id)
- self.put(gd_entity['links']['assignment'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 1,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, gd_entity)
-
- # Now re-read the collection asking for effective roles - this
- # should mean the group assignment is translated into the two
- # member user assignments
- collection_url = '/role_assignments?effective'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 2,
- resource_url=collection_url)
- ud_entity = self.build_role_assignment_entity(
- link=gd_entity['links']['assignment'], domain_id=self.domain_id,
- user_id=user1['id'], role_id=self.role_id)
- self.assertRoleAssignmentInListResponse(r, ud_entity)
- ud_entity = self.build_role_assignment_entity(
- link=gd_entity['links']['assignment'], domain_id=self.domain_id,
- user_id=user2['id'], role_id=self.role_id)
- self.assertRoleAssignmentInListResponse(r, ud_entity)
-
- def test_check_effective_values_for_role_assignments(self):
- """Call ``GET /role_assignments?effective=value``.
-
- Check the various ways of specifying the 'effective'
- query parameter. If the 'effective' query parameter
- is included then this should always be treated as meaning 'True'
- unless it is specified as:
-
- {url}?effective=0
-
- This is by design to match the agreed way of handling
- policy checking on query/filter parameters.
-
- Test Plan:
-
- - Create two extra user for tests
- - Add these users to a group
- - Add a role assignment for the group on a domain
- - Get a list of all role assignments, checking one has been added
- - Then issue various request with different ways of defining
- the 'effective' query parameter. As we have tested the
- correctness of the data coming back when we get effective roles
- in other tests, here we just use the count of entities to
- know if we are getting effective roles or not
-
- """
- user1 = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
- user2 = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
-
- self.identity_api.add_user_to_group(user1['id'], self.group['id'])
- self.identity_api.add_user_to_group(user2['id'], self.group['id'])
-
- collection_url = '/role_assignments'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- resource_url=collection_url)
- existing_assignments = len(r.result.get('role_assignments'))
-
- gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
- group_id=self.group_id,
- role_id=self.role_id)
- self.put(gd_entity['links']['assignment'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 1,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, gd_entity)
-
- # Now re-read the collection asking for effective roles,
- # using the most common way of defining "effective'. This
- # should mean the group assignment is translated into the two
- # member user assignments
- collection_url = '/role_assignments?effective'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 2,
- resource_url=collection_url)
- # Now set 'effective' to false explicitly - should get
- # back the regular roles
- collection_url = '/role_assignments?effective=0'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 1,
- resource_url=collection_url)
- # Now try setting 'effective' to 'False' explicitly- this is
- # NOT supported as a way of setting a query or filter
- # parameter to false by design. Hence we should get back
- # effective roles.
- collection_url = '/role_assignments?effective=False'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 2,
- resource_url=collection_url)
- # Now set 'effective' to True explicitly
- collection_url = '/role_assignments?effective=True'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r,
- expected_length=existing_assignments + 2,
- resource_url=collection_url)
-
- def test_filtered_role_assignments(self):
- """Call ``GET /role_assignments?filters``.
-
- Test Plan:
-
- - Create extra users, group, role and project for tests
- - Make the following assignments:
- Give group1, role1 on project1 and domain
- Give user1, role2 on project1 and domain
- Make User1 a member of Group1
- - Test a series of single filter list calls, checking that
- the correct results are obtained
- - Test a multi-filtered list call
- - Test listing all effective roles for a given user
- - Test the equivalent of the list of roles in a project scoped
- token (all effective roles for a user on a project)
-
- """
- # Since the default fixtures already assign some roles to the
- # user it creates, we also need a new user that will not have any
- # existing assignments
- user1 = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
- user2 = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
-
- group1 = unit.new_group_ref(domain_id=self.domain['id'])
- group1 = self.identity_api.create_group(group1)
- self.identity_api.add_user_to_group(user1['id'], group1['id'])
- self.identity_api.add_user_to_group(user2['id'], group1['id'])
- project1 = unit.new_project_ref(domain_id=self.domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- self.role1 = unit.new_role_ref()
- self.role_api.create_role(self.role1['id'], self.role1)
- self.role2 = unit.new_role_ref()
- self.role_api.create_role(self.role2['id'], self.role2)
-
- # Now add one of each of the four types of assignment
-
- gd_entity = self.build_role_assignment_entity(
- domain_id=self.domain_id, group_id=group1['id'],
- role_id=self.role1['id'])
- self.put(gd_entity['links']['assignment'])
-
- ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
- user_id=user1['id'],
- role_id=self.role2['id'])
- self.put(ud_entity['links']['assignment'])
-
- gp_entity = self.build_role_assignment_entity(
- project_id=project1['id'],
- group_id=group1['id'],
- role_id=self.role1['id'])
- self.put(gp_entity['links']['assignment'])
-
- up_entity = self.build_role_assignment_entity(
- project_id=project1['id'],
- user_id=user1['id'],
- role_id=self.role2['id'])
- self.put(up_entity['links']['assignment'])
-
- # Now list by various filters to make sure we get back the right ones
-
- collection_url = ('/role_assignments?scope.project.id=%s' %
- project1['id'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=2,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, up_entity)
- self.assertRoleAssignmentInListResponse(r, gp_entity)
-
- collection_url = ('/role_assignments?scope.domain.id=%s' %
- self.domain['id'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=2,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, ud_entity)
- self.assertRoleAssignmentInListResponse(r, gd_entity)
-
- collection_url = '/role_assignments?user.id=%s' % user1['id']
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=2,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, up_entity)
- self.assertRoleAssignmentInListResponse(r, ud_entity)
-
- collection_url = '/role_assignments?group.id=%s' % group1['id']
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=2,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, gd_entity)
- self.assertRoleAssignmentInListResponse(r, gp_entity)
-
- collection_url = '/role_assignments?role.id=%s' % self.role1['id']
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=2,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, gd_entity)
- self.assertRoleAssignmentInListResponse(r, gp_entity)
-
- # Let's try combining two filers together....
-
- collection_url = (
- '/role_assignments?user.id=%(user_id)s'
- '&scope.project.id=%(project_id)s' % {
- 'user_id': user1['id'],
- 'project_id': project1['id']})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=1,
- resource_url=collection_url)
- self.assertRoleAssignmentInListResponse(r, up_entity)
-
- # Now for a harder one - filter for user with effective
- # roles - this should return role assignment that were directly
- # assigned as well as by virtue of group membership
-
- collection_url = ('/role_assignments?effective&user.id=%s' %
- user1['id'])
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=4,
- resource_url=collection_url)
- # Should have the two direct roles...
- self.assertRoleAssignmentInListResponse(r, up_entity)
- self.assertRoleAssignmentInListResponse(r, ud_entity)
- # ...and the two via group membership...
- gp1_link = self.build_role_assignment_link(
- project_id=project1['id'],
- group_id=group1['id'],
- role_id=self.role1['id'])
- gd1_link = self.build_role_assignment_link(domain_id=self.domain_id,
- group_id=group1['id'],
- role_id=self.role1['id'])
-
- up1_entity = self.build_role_assignment_entity(
- link=gp1_link, project_id=project1['id'],
- user_id=user1['id'], role_id=self.role1['id'])
- ud1_entity = self.build_role_assignment_entity(
- link=gd1_link, domain_id=self.domain_id, user_id=user1['id'],
- role_id=self.role1['id'])
- self.assertRoleAssignmentInListResponse(r, up1_entity)
- self.assertRoleAssignmentInListResponse(r, ud1_entity)
-
- # ...and for the grand-daddy of them all, simulate the request
- # that would generate the list of effective roles in a project
- # scoped token.
-
- collection_url = (
- '/role_assignments?effective&user.id=%(user_id)s'
- '&scope.project.id=%(project_id)s' % {
- 'user_id': user1['id'],
- 'project_id': project1['id']})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=2,
- resource_url=collection_url)
- # Should have one direct role and one from group membership...
- self.assertRoleAssignmentInListResponse(r, up_entity)
- self.assertRoleAssignmentInListResponse(r, up1_entity)
-
-
-class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
- test_v3.AssignmentTestMixin):
- """Base class for testing /v3/role_assignments API behavior."""
-
- MAX_HIERARCHY_BREADTH = 3
- MAX_HIERARCHY_DEPTH = CONF.max_project_tree_depth - 1
-
- def load_sample_data(self):
- """Creates sample data to be used on tests.
-
- Created data are i) a role and ii) a domain containing: a project
- hierarchy and 3 users within 3 groups.
-
- """
- def create_project_hierarchy(parent_id, depth):
- """Creates a random project hierarchy."""
- if depth == 0:
- return
-
- breadth = random.randint(1, self.MAX_HIERARCHY_BREADTH)
-
- subprojects = []
- for i in range(breadth):
- subprojects.append(unit.new_project_ref(
- domain_id=self.domain_id, parent_id=parent_id))
- self.resource_api.create_project(subprojects[-1]['id'],
- subprojects[-1])
-
- new_parent = subprojects[random.randint(0, breadth - 1)]
- create_project_hierarchy(new_parent['id'], depth - 1)
-
- super(RoleAssignmentBaseTestCase, self).load_sample_data()
-
- # Create a domain
- self.domain = unit.new_domain_ref()
- self.domain_id = self.domain['id']
- self.resource_api.create_domain(self.domain_id, self.domain)
-
- # Create a project hierarchy
- self.project = unit.new_project_ref(domain_id=self.domain_id)
- self.project_id = self.project['id']
- self.resource_api.create_project(self.project_id, self.project)
-
- # Create a random project hierarchy
- create_project_hierarchy(self.project_id,
- random.randint(1, self.MAX_HIERARCHY_DEPTH))
-
- # Create 3 users
- self.user_ids = []
- for i in range(3):
- user = unit.new_user_ref(domain_id=self.domain_id)
- user = self.identity_api.create_user(user)
- self.user_ids.append(user['id'])
-
- # Create 3 groups
- self.group_ids = []
- for i in range(3):
- group = unit.new_group_ref(domain_id=self.domain_id)
- group = self.identity_api.create_group(group)
- self.group_ids.append(group['id'])
-
- # Put 2 members on each group
- self.identity_api.add_user_to_group(user_id=self.user_ids[i],
- group_id=group['id'])
- self.identity_api.add_user_to_group(user_id=self.user_ids[i % 2],
- group_id=group['id'])
-
- self.assignment_api.create_grant(user_id=self.user_id,
- project_id=self.project_id,
- role_id=self.role_id)
-
- # Create a role
- self.role = unit.new_role_ref()
- self.role_id = self.role['id']
- self.role_api.create_role(self.role_id, self.role)
-
- # Set default user and group to be used on tests
- self.default_user_id = self.user_ids[0]
- self.default_group_id = self.group_ids[0]
-
- def get_role_assignments(self, expected_status=http_client.OK, **filters):
- """Returns the result from querying role assignment API + queried URL.
-
- Calls GET /v3/role_assignments?<params> and returns its result, where
- <params> is the HTTP query parameters form of effective option plus
- filters, if provided. Queried URL is returned as well.
-
- :returns: a tuple containing the list role assignments API response and
- queried URL.
-
- """
- query_url = self._get_role_assignments_query_url(**filters)
- response = self.get(query_url, expected_status=expected_status)
-
- return (response, query_url)
-
- def _get_role_assignments_query_url(self, **filters):
- """Returns non-effective role assignments query URL from given filters.
-
- :param filters: query parameters are created with the provided filters
- on role assignments attributes. Valid filters are:
- role_id, domain_id, project_id, group_id, user_id and
- inherited_to_projects.
-
- :returns: role assignments query URL.
-
- """
- return self.build_role_assignment_query_url(**filters)
-
-
-class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase):
- """Class for testing invalid query params on /v3/role_assignments API.
-
- Querying domain and project, or user and group results in a HTTP 400 Bad
- Request, since a role assignment must contain only a single pair of (actor,
- target). In addition, since filtering on role assignments applies only to
- the final result, effective mode cannot be combined with i) group or ii)
- domain and inherited, because it would always result in an empty list.
-
- """
-
- def test_get_role_assignments_by_domain_and_project(self):
- self.get_role_assignments(domain_id=self.domain_id,
- project_id=self.project_id,
- expected_status=http_client.BAD_REQUEST)
-
- def test_get_role_assignments_by_user_and_group(self):
- self.get_role_assignments(user_id=self.default_user_id,
- group_id=self.default_group_id,
- expected_status=http_client.BAD_REQUEST)
-
- def test_get_role_assignments_by_effective_and_inherited(self):
- self.config_fixture.config(group='os_inherit', enabled=True)
-
- self.get_role_assignments(domain_id=self.domain_id, effective=True,
- inherited_to_projects=True,
- expected_status=http_client.BAD_REQUEST)
-
- def test_get_role_assignments_by_effective_and_group(self):
- self.get_role_assignments(effective=True,
- group_id=self.default_group_id,
- expected_status=http_client.BAD_REQUEST)
-
-
-class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase):
- """Class for testing direct assignments on /v3/role_assignments API.
-
- Direct assignments on a domain or project have effect on them directly,
- instead of on their project hierarchy, i.e they are non-inherited. In
- addition, group direct assignments are not expanded to group's users.
-
- Tests on this class make assertions on the representation and API filtering
- of direct assignments.
-
- """
-
- def _test_get_role_assignments(self, **filters):
- """Generic filtering test method.
-
- According to the provided filters, this method:
- - creates a new role assignment;
- - asserts that list role assignments API reponds correctly;
- - deletes the created role assignment.
-
- :param filters: filters to be considered when listing role assignments.
- Valid filters are: role_id, domain_id, project_id,
- group_id, user_id and inherited_to_projects.
-
- """
- # Fills default assignment with provided filters
- test_assignment = self._set_default_assignment_attributes(**filters)
-
- # Create new role assignment for this test
- self.assignment_api.create_grant(**test_assignment)
-
- # Get expected role assignments
- expected_assignments = self._list_expected_role_assignments(
- **test_assignment)
-
- # Get role assignments from API
- response, query_url = self.get_role_assignments(**test_assignment)
- self.assertValidRoleAssignmentListResponse(response,
- resource_url=query_url)
- self.assertEqual(len(expected_assignments),
- len(response.result.get('role_assignments')))
-
- # Assert that expected role assignments were returned by the API call
- for assignment in expected_assignments:
- self.assertRoleAssignmentInListResponse(response, assignment)
-
- # Delete created role assignment
- self.assignment_api.delete_grant(**test_assignment)
-
- def _set_default_assignment_attributes(self, **attribs):
- """Inserts default values for missing attributes of role assignment.
-
- If no actor, target or role are provided, they will default to values
- from sample data.
-
- :param attribs: info from a role assignment entity. Valid attributes
- are: role_id, domain_id, project_id, group_id, user_id
- and inherited_to_projects.
-
- """
- if not any(target in attribs
- for target in ('domain_id', 'projects_id')):
- attribs['project_id'] = self.project_id
-
- if not any(actor in attribs for actor in ('user_id', 'group_id')):
- attribs['user_id'] = self.default_user_id
-
- if 'role_id' not in attribs:
- attribs['role_id'] = self.role_id
-
- return attribs
-
- def _list_expected_role_assignments(self, **filters):
- """Given the filters, it returns expected direct role assignments.
-
- :param filters: filters that will be considered when listing role
- assignments. Valid filters are: role_id, domain_id,
- project_id, group_id, user_id and
- inherited_to_projects.
-
- :returns: the list of the expected role assignments.
-
- """
- return [self.build_role_assignment_entity(**filters)]
-
- # Test cases below call the generic test method, providing different filter
- # combinations. Filters are provided as specified in the method name, after
- # 'by'. For example, test_get_role_assignments_by_project_user_and_role
- # calls the generic test method with project_id, user_id and role_id.
-
- def test_get_role_assignments_by_domain(self, **filters):
- self._test_get_role_assignments(domain_id=self.domain_id, **filters)
-
- def test_get_role_assignments_by_project(self, **filters):
- self._test_get_role_assignments(project_id=self.project_id, **filters)
-
- def test_get_role_assignments_by_user(self, **filters):
- self._test_get_role_assignments(user_id=self.default_user_id,
- **filters)
-
- def test_get_role_assignments_by_group(self, **filters):
- self._test_get_role_assignments(group_id=self.default_group_id,
- **filters)
-
- def test_get_role_assignments_by_role(self, **filters):
- self._test_get_role_assignments(role_id=self.role_id, **filters)
-
- def test_get_role_assignments_by_domain_and_user(self, **filters):
- self.test_get_role_assignments_by_domain(user_id=self.default_user_id,
- **filters)
-
- def test_get_role_assignments_by_domain_and_group(self, **filters):
- self.test_get_role_assignments_by_domain(
- group_id=self.default_group_id, **filters)
-
- def test_get_role_assignments_by_project_and_user(self, **filters):
- self.test_get_role_assignments_by_project(user_id=self.default_user_id,
- **filters)
-
- def test_get_role_assignments_by_project_and_group(self, **filters):
- self.test_get_role_assignments_by_project(
- group_id=self.default_group_id, **filters)
-
- def test_get_role_assignments_by_domain_user_and_role(self, **filters):
- self.test_get_role_assignments_by_domain_and_user(role_id=self.role_id,
- **filters)
-
- def test_get_role_assignments_by_domain_group_and_role(self, **filters):
- self.test_get_role_assignments_by_domain_and_group(
- role_id=self.role_id, **filters)
-
- def test_get_role_assignments_by_project_user_and_role(self, **filters):
- self.test_get_role_assignments_by_project_and_user(
- role_id=self.role_id, **filters)
-
- def test_get_role_assignments_by_project_group_and_role(self, **filters):
- self.test_get_role_assignments_by_project_and_group(
- role_id=self.role_id, **filters)
-
-
-class RoleAssignmentInheritedTestCase(RoleAssignmentDirectTestCase):
- """Class for testing inherited assignments on /v3/role_assignments API.
-
- Inherited assignments on a domain or project have no effect on them
- directly, but on the projects under them instead.
-
- Tests on this class do not make assertions on the effect of inherited
- assignments, but in their representation and API filtering.
-
- """
-
- def config_overrides(self):
- super(RoleAssignmentBaseTestCase, self).config_overrides()
- self.config_fixture.config(group='os_inherit', enabled=True)
-
- def _test_get_role_assignments(self, **filters):
- """Adds inherited_to_project filter to expected entity in tests."""
- super(RoleAssignmentInheritedTestCase,
- self)._test_get_role_assignments(inherited_to_projects=True,
- **filters)
-
-
-class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
- """Class for testing inheritance effects on /v3/role_assignments API.
-
- Inherited assignments on a domain or project have no effect on them
- directly, but on the projects under them instead.
-
- Tests on this class make assertions on the effect of inherited assignments
- and API filtering.
-
- """
-
- def _get_role_assignments_query_url(self, **filters):
- """Returns effective role assignments query URL from given filters.
-
- For test methods in this class, effetive will always be true. As in
- effective mode, inherited_to_projects, group_id, domain_id and
- project_id will always be desconsidered from provided filters.
-
- :param filters: query parameters are created with the provided filters.
- Valid filters are: role_id, domain_id, project_id,
- group_id, user_id and inherited_to_projects.
-
- :returns: role assignments query URL.
-
- """
- query_filters = filters.copy()
- query_filters.pop('inherited_to_projects')
-
- query_filters.pop('group_id', None)
- query_filters.pop('domain_id', None)
- query_filters.pop('project_id', None)
-
- return self.build_role_assignment_query_url(effective=True,
- **query_filters)
-
- def _list_expected_role_assignments(self, **filters):
- """Given the filters, it returns expected direct role assignments.
-
- :param filters: filters that will be considered when listing role
- assignments. Valid filters are: role_id, domain_id,
- project_id, group_id, user_id and
- inherited_to_projects.
-
- :returns: the list of the expected role assignments.
-
- """
- # Get assignment link, to be put on 'links': {'assignment': link}
- assignment_link = self.build_role_assignment_link(**filters)
-
- # Expand group membership
- user_ids = [None]
- if filters.get('group_id'):
- user_ids = [user['id'] for user in
- self.identity_api.list_users_in_group(
- filters['group_id'])]
- else:
- user_ids = [self.default_user_id]
-
- # Expand role inheritance
- project_ids = [None]
- if filters.get('domain_id'):
- project_ids = [project['id'] for project in
- self.resource_api.list_projects_in_domain(
- filters.pop('domain_id'))]
- else:
- project_ids = [project['id'] for project in
- self.resource_api.list_projects_in_subtree(
- self.project_id)]
-
- # Compute expected role assignments
- assignments = []
- for project_id in project_ids:
- filters['project_id'] = project_id
- for user_id in user_ids:
- filters['user_id'] = user_id
- assignments.append(self.build_role_assignment_entity(
- link=assignment_link, **filters))
-
- return assignments
-
-
-class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
- test_v3.AssignmentTestMixin):
- """Test inheritance crud and its effects."""
-
- def config_overrides(self):
- super(AssignmentInheritanceTestCase, self).config_overrides()
- self.config_fixture.config(group='os_inherit', enabled=True)
-
- def test_get_token_from_inherited_user_domain_role_grants(self):
- # Create a new user to ensure that no grant is loaded from sample data
- user = unit.create_user(self.identity_api, domain_id=self.domain_id)
-
- # Define domain and project authentication data
- domain_auth_data = self.build_authentication_request(
- user_id=user['id'],
- password=user['password'],
- domain_id=self.domain_id)
- project_auth_data = self.build_authentication_request(
- user_id=user['id'],
- password=user['password'],
- project_id=self.project_id)
-
- # Check the user cannot get a domain nor a project token
- self.v3_create_token(domain_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_create_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- # Grant non-inherited role for user on domain
- non_inher_ud_link = self.build_role_assignment_link(
- domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
- self.put(non_inher_ud_link)
-
- # Check the user can get only a domain token
- self.v3_create_token(domain_auth_data)
- self.v3_create_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- # Create inherited role
- inherited_role = unit.new_role_ref(name='inherited')
- self.role_api.create_role(inherited_role['id'], inherited_role)
-
- # Grant inherited role for user on domain
- inher_ud_link = self.build_role_assignment_link(
- domain_id=self.domain_id, user_id=user['id'],
- role_id=inherited_role['id'], inherited_to_projects=True)
- self.put(inher_ud_link)
-
- # Check the user can get both a domain and a project token
- self.v3_create_token(domain_auth_data)
- self.v3_create_token(project_auth_data)
-
- # Delete inherited grant
- self.delete(inher_ud_link)
-
- # Check the user can only get a domain token
- self.v3_create_token(domain_auth_data)
- self.v3_create_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- # Delete non-inherited grant
- self.delete(non_inher_ud_link)
-
- # Check the user cannot get a domain token anymore
- self.v3_create_token(domain_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_get_token_from_inherited_group_domain_role_grants(self):
- # Create a new group and put a new user in it to
- # ensure that no grant is loaded from sample data
- user = unit.create_user(self.identity_api, domain_id=self.domain_id)
-
- group = unit.new_group_ref(domain_id=self.domain['id'])
- group = self.identity_api.create_group(group)
- self.identity_api.add_user_to_group(user['id'], group['id'])
-
- # Define domain and project authentication data
- domain_auth_data = self.build_authentication_request(
- user_id=user['id'],
- password=user['password'],
- domain_id=self.domain_id)
- project_auth_data = self.build_authentication_request(
- user_id=user['id'],
- password=user['password'],
- project_id=self.project_id)
-
- # Check the user cannot get a domain nor a project token
- self.v3_create_token(domain_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_create_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- # Grant non-inherited role for user on domain
- non_inher_gd_link = self.build_role_assignment_link(
- domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id)
- self.put(non_inher_gd_link)
-
- # Check the user can get only a domain token
- self.v3_create_token(domain_auth_data)
- self.v3_create_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- # Create inherited role
- inherited_role = unit.new_role_ref(name='inherited')
- self.role_api.create_role(inherited_role['id'], inherited_role)
-
- # Grant inherited role for user on domain
- inher_gd_link = self.build_role_assignment_link(
- domain_id=self.domain_id, user_id=user['id'],
- role_id=inherited_role['id'], inherited_to_projects=True)
- self.put(inher_gd_link)
-
- # Check the user can get both a domain and a project token
- self.v3_create_token(domain_auth_data)
- self.v3_create_token(project_auth_data)
-
- # Delete inherited grant
- self.delete(inher_gd_link)
-
- # Check the user can only get a domain token
- self.v3_create_token(domain_auth_data)
- self.v3_create_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- # Delete non-inherited grant
- self.delete(non_inher_gd_link)
-
- # Check the user cannot get a domain token anymore
- self.v3_create_token(domain_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def _test_crud_inherited_and_direct_assignment_on_target(self, target_url):
- # Create a new role to avoid assignments loaded from sample data
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
-
- # Define URLs
- direct_url = '%s/users/%s/roles/%s' % (
- target_url, self.user_id, role['id'])
- inherited_url = '/OS-INHERIT/%s/inherited_to_projects' % direct_url
-
- # Create the direct assignment
- self.put(direct_url)
- # Check the direct assignment exists, but the inherited one does not
- self.head(direct_url)
- self.head(inherited_url, expected_status=http_client.NOT_FOUND)
-
- # Now add the inherited assignment
- self.put(inherited_url)
- # Check both the direct and inherited assignment exist
- self.head(direct_url)
- self.head(inherited_url)
-
- # Delete indirect assignment
- self.delete(inherited_url)
- # Check the direct assignment exists, but the inherited one does not
- self.head(direct_url)
- self.head(inherited_url, expected_status=http_client.NOT_FOUND)
-
- # Now delete the inherited assignment
- self.delete(direct_url)
- # Check that none of them exist
- self.head(direct_url, expected_status=http_client.NOT_FOUND)
- self.head(inherited_url, expected_status=http_client.NOT_FOUND)
-
- def test_crud_inherited_and_direct_assignment_on_domains(self):
- self._test_crud_inherited_and_direct_assignment_on_target(
- '/domains/%s' % self.domain_id)
-
- def test_crud_inherited_and_direct_assignment_on_projects(self):
- self._test_crud_inherited_and_direct_assignment_on_target(
- '/projects/%s' % self.project_id)
-
- def test_crud_user_inherited_domain_role_grants(self):
- role_list = []
- for _ in range(2):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- # Create a non-inherited role as a spoiler
- self.assignment_api.create_grant(
- role_list[1]['id'], user_id=self.user['id'],
- domain_id=self.domain_id)
-
- base_collection_url = (
- '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': self.domain_id,
- 'user_id': self.user['id']})
- member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
- 'collection_url': base_collection_url,
- 'role_id': role_list[0]['id']}
- collection_url = base_collection_url + '/inherited_to_projects'
-
- self.put(member_url)
-
- # Check we can read it back
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=role_list[0],
- resource_url=collection_url)
-
- # Now delete and check its gone
- self.delete(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, expected_length=0,
- resource_url=collection_url)
-
- def test_list_role_assignments_for_inherited_domain_grants(self):
- """Call ``GET /role_assignments with inherited domain grants``.
-
- Test Plan:
-
- - Create 4 roles
- - Create a domain with a user and two projects
- - Assign two direct roles to project1
- - Assign a spoiler role to project2
- - Issue the URL to add inherited role to the domain
- - Issue the URL to check it is indeed on the domain
- - Issue the URL to check effective roles on project1 - this
- should return 3 roles.
-
- """
- role_list = []
- for _ in range(4):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user1 = unit.create_user(self.identity_api, domain_id=domain['id'])
- project1 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project2['id'], project2)
- # Add some roles to the project
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project1['id'], role_list[0]['id'])
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project1['id'], role_list[1]['id'])
- # ..and one on a different project as a spoiler
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project2['id'], role_list[2]['id'])
-
- # Now create our inherited role on the domain
- base_collection_url = (
- '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': domain['id'],
- 'user_id': user1['id']})
- member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
- 'collection_url': base_collection_url,
- 'role_id': role_list[3]['id']}
- collection_url = base_collection_url + '/inherited_to_projects'
-
- self.put(member_url)
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=role_list[3],
- resource_url=collection_url)
-
- # Now use the list domain role assignments api to check if this
- # is included
- collection_url = (
- '/role_assignments?user.id=%(user_id)s'
- '&scope.domain.id=%(domain_id)s' % {
- 'user_id': user1['id'],
- 'domain_id': domain['id']})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=1,
- resource_url=collection_url)
- ud_entity = self.build_role_assignment_entity(
- domain_id=domain['id'], user_id=user1['id'],
- role_id=role_list[3]['id'], inherited_to_projects=True)
- self.assertRoleAssignmentInListResponse(r, ud_entity)
-
- # Now ask for effective list role assignments - the role should
- # turn into a project role, along with the two direct roles that are
- # on the project
- collection_url = (
- '/role_assignments?effective&user.id=%(user_id)s'
- '&scope.project.id=%(project_id)s' % {
- 'user_id': user1['id'],
- 'project_id': project1['id']})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=3,
- resource_url=collection_url)
- # An effective role for an inherited role will be a project
- # entity, with a domain link to the inherited assignment
- ud_url = self.build_role_assignment_link(
- domain_id=domain['id'], user_id=user1['id'],
- role_id=role_list[3]['id'], inherited_to_projects=True)
- up_entity = self.build_role_assignment_entity(
- link=ud_url, project_id=project1['id'],
- user_id=user1['id'], role_id=role_list[3]['id'],
- inherited_to_projects=True)
- self.assertRoleAssignmentInListResponse(r, up_entity)
-
- def test_list_role_assignments_include_names(self):
- """Call ``GET /role_assignments with include names``.
-
- Test Plan:
-
- - Create a domain with a group and a user
- - Create a project with a group and a user
-
- """
- role1 = unit.new_role_ref()
- self.role_api.create_role(role1['id'], role1)
- user1 = unit.create_user(self.identity_api, domain_id=self.domain_id)
- group = unit.new_group_ref(domain_id=self.domain_id)
- group = self.identity_api.create_group(group)
- project1 = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project1['id'], project1)
-
- expected_entity1 = self.build_role_assignment_entity_include_names(
- role_ref=role1,
- project_ref=project1,
- user_ref=user1)
- self.put(expected_entity1['links']['assignment'])
- expected_entity2 = self.build_role_assignment_entity_include_names(
- role_ref=role1,
- domain_ref=self.domain,
- group_ref=group)
- self.put(expected_entity2['links']['assignment'])
- expected_entity3 = self.build_role_assignment_entity_include_names(
- role_ref=role1,
- domain_ref=self.domain,
- user_ref=user1)
- self.put(expected_entity3['links']['assignment'])
- expected_entity4 = self.build_role_assignment_entity_include_names(
- role_ref=role1,
- project_ref=project1,
- group_ref=group)
- self.put(expected_entity4['links']['assignment'])
-
- collection_url_domain = (
- '/role_assignments?include_names&scope.domain.id=%(domain_id)s' % {
- 'domain_id': self.domain_id})
- rs_domain = self.get(collection_url_domain)
- collection_url_project = (
- '/role_assignments?include_names&'
- 'scope.project.id=%(project_id)s' % {
- 'project_id': project1['id']})
- rs_project = self.get(collection_url_project)
- collection_url_group = (
- '/role_assignments?include_names&group.id=%(group_id)s' % {
- 'group_id': group['id']})
- rs_group = self.get(collection_url_group)
- collection_url_user = (
- '/role_assignments?include_names&user.id=%(user_id)s' % {
- 'user_id': user1['id']})
- rs_user = self.get(collection_url_user)
- collection_url_role = (
- '/role_assignments?include_names&role.id=%(role_id)s' % {
- 'role_id': role1['id']})
- rs_role = self.get(collection_url_role)
- # Make sure all entities were created successfully
- self.assertEqual(rs_domain.status_int, http_client.OK)
- self.assertEqual(rs_project.status_int, http_client.OK)
- self.assertEqual(rs_group.status_int, http_client.OK)
- self.assertEqual(rs_user.status_int, http_client.OK)
- # Make sure we can get back the correct number of entities
- self.assertValidRoleAssignmentListResponse(
- rs_domain,
- expected_length=2,
- resource_url=collection_url_domain)
- self.assertValidRoleAssignmentListResponse(
- rs_project,
- expected_length=2,
- resource_url=collection_url_project)
- self.assertValidRoleAssignmentListResponse(
- rs_group,
- expected_length=2,
- resource_url=collection_url_group)
- self.assertValidRoleAssignmentListResponse(
- rs_user,
- expected_length=2,
- resource_url=collection_url_user)
- self.assertValidRoleAssignmentListResponse(
- rs_role,
- expected_length=4,
- resource_url=collection_url_role)
- # Verify all types of entities have the correct format
- self.assertRoleAssignmentInListResponse(rs_domain, expected_entity2)
- self.assertRoleAssignmentInListResponse(rs_project, expected_entity1)
- self.assertRoleAssignmentInListResponse(rs_group, expected_entity4)
- self.assertRoleAssignmentInListResponse(rs_user, expected_entity3)
- self.assertRoleAssignmentInListResponse(rs_role, expected_entity1)
-
- def test_list_role_assignments_for_disabled_inheritance_extension(self):
- """Call ``GET /role_assignments with inherited domain grants``.
-
- Test Plan:
-
- - Issue the URL to add inherited role to the domain
- - Issue the URL to check effective roles on project include the
- inherited role
- - Disable the extension
- - Re-check the effective roles, proving the inherited role no longer
- shows up.
-
- """
- role_list = []
- for _ in range(4):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user1 = unit.create_user(self.identity_api, domain_id=domain['id'])
- project1 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project2['id'], project2)
- # Add some roles to the project
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project1['id'], role_list[0]['id'])
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project1['id'], role_list[1]['id'])
- # ..and one on a different project as a spoiler
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project2['id'], role_list[2]['id'])
-
- # Now create our inherited role on the domain
- base_collection_url = (
- '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': domain['id'],
- 'user_id': user1['id']})
- member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
- 'collection_url': base_collection_url,
- 'role_id': role_list[3]['id']}
- collection_url = base_collection_url + '/inherited_to_projects'
-
- self.put(member_url)
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=role_list[3],
- resource_url=collection_url)
-
- # Get effective list role assignments - the role should
- # turn into a project role, along with the two direct roles that are
- # on the project
- collection_url = (
- '/role_assignments?effective&user.id=%(user_id)s'
- '&scope.project.id=%(project_id)s' % {
- 'user_id': user1['id'],
- 'project_id': project1['id']})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=3,
- resource_url=collection_url)
-
- ud_url = self.build_role_assignment_link(
- domain_id=domain['id'], user_id=user1['id'],
- role_id=role_list[3]['id'], inherited_to_projects=True)
- up_entity = self.build_role_assignment_entity(
- link=ud_url, project_id=project1['id'],
- user_id=user1['id'], role_id=role_list[3]['id'],
- inherited_to_projects=True)
-
- self.assertRoleAssignmentInListResponse(r, up_entity)
-
- # Disable the extension and re-check the list, the role inherited
- # from the project should no longer show up
- self.config_fixture.config(group='os_inherit', enabled=False)
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=2,
- resource_url=collection_url)
-
- self.assertRoleAssignmentNotInListResponse(r, up_entity)
-
- def test_list_role_assignments_for_inherited_group_domain_grants(self):
- """Call ``GET /role_assignments with inherited group domain grants``.
-
- Test Plan:
-
- - Create 4 roles
- - Create a domain with a user and two projects
- - Assign two direct roles to project1
- - Assign a spoiler role to project2
- - Issue the URL to add inherited role to the domain
- - Issue the URL to check it is indeed on the domain
- - Issue the URL to check effective roles on project1 - this
- should return 3 roles.
-
- """
- role_list = []
- for _ in range(4):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user1 = unit.create_user(self.identity_api, domain_id=domain['id'])
- user2 = unit.create_user(self.identity_api, domain_id=domain['id'])
- group1 = unit.new_group_ref(domain_id=domain['id'])
- group1 = self.identity_api.create_group(group1)
- self.identity_api.add_user_to_group(user1['id'],
- group1['id'])
- self.identity_api.add_user_to_group(user2['id'],
- group1['id'])
- project1 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project2['id'], project2)
- # Add some roles to the project
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project1['id'], role_list[0]['id'])
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project1['id'], role_list[1]['id'])
- # ..and one on a different project as a spoiler
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project2['id'], role_list[2]['id'])
-
- # Now create our inherited role on the domain
- base_collection_url = (
- '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
- 'domain_id': domain['id'],
- 'group_id': group1['id']})
- member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
- 'collection_url': base_collection_url,
- 'role_id': role_list[3]['id']}
- collection_url = base_collection_url + '/inherited_to_projects'
-
- self.put(member_url)
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=role_list[3],
- resource_url=collection_url)
-
- # Now use the list domain role assignments api to check if this
- # is included
- collection_url = (
- '/role_assignments?group.id=%(group_id)s'
- '&scope.domain.id=%(domain_id)s' % {
- 'group_id': group1['id'],
- 'domain_id': domain['id']})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=1,
- resource_url=collection_url)
- gd_entity = self.build_role_assignment_entity(
- domain_id=domain['id'], group_id=group1['id'],
- role_id=role_list[3]['id'], inherited_to_projects=True)
- self.assertRoleAssignmentInListResponse(r, gd_entity)
-
- # Now ask for effective list role assignments - the role should
- # turn into a user project role, along with the two direct roles
- # that are on the project
- collection_url = (
- '/role_assignments?effective&user.id=%(user_id)s'
- '&scope.project.id=%(project_id)s' % {
- 'user_id': user1['id'],
- 'project_id': project1['id']})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=3,
- resource_url=collection_url)
- # An effective role for an inherited role will be a project
- # entity, with a domain link to the inherited assignment
- up_entity = self.build_role_assignment_entity(
- link=gd_entity['links']['assignment'], project_id=project1['id'],
- user_id=user1['id'], role_id=role_list[3]['id'],
- inherited_to_projects=True)
- self.assertRoleAssignmentInListResponse(r, up_entity)
-
- def test_filtered_role_assignments_for_inherited_grants(self):
- """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
-
- Test Plan:
-
- - Create 5 roles
- - Create a domain with a user, group and two projects
- - Assign three direct spoiler roles to projects
- - Issue the URL to add an inherited user role to the domain
- - Issue the URL to add an inherited group role to the domain
- - Issue the URL to filter by inherited roles - this should
- return just the 2 inherited roles.
-
- """
- role_list = []
- for _ in range(5):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user1 = unit.create_user(self.identity_api, domain_id=domain['id'])
- group1 = unit.new_group_ref(domain_id=domain['id'])
- group1 = self.identity_api.create_group(group1)
- project1 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project1['id'], project1)
- project2 = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project2['id'], project2)
- # Add some spoiler roles to the projects
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project1['id'], role_list[0]['id'])
- self.assignment_api.add_role_to_user_and_project(
- user1['id'], project2['id'], role_list[1]['id'])
- # Create a non-inherited role as a spoiler
- self.assignment_api.create_grant(
- role_list[2]['id'], user_id=user1['id'], domain_id=domain['id'])
-
- # Now create two inherited roles on the domain, one for a user
- # and one for a domain
- base_collection_url = (
- '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': domain['id'],
- 'user_id': user1['id']})
- member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
- 'collection_url': base_collection_url,
- 'role_id': role_list[3]['id']}
- collection_url = base_collection_url + '/inherited_to_projects'
-
- self.put(member_url)
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=role_list[3],
- resource_url=collection_url)
-
- base_collection_url = (
- '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
- 'domain_id': domain['id'],
- 'group_id': group1['id']})
- member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
- 'collection_url': base_collection_url,
- 'role_id': role_list[4]['id']}
- collection_url = base_collection_url + '/inherited_to_projects'
-
- self.put(member_url)
- self.head(member_url)
- r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=role_list[4],
- resource_url=collection_url)
-
- # Now use the list role assignments api to get a list of inherited
- # roles on the domain - should get back the two roles
- collection_url = (
- '/role_assignments?scope.OS-INHERIT:inherited_to=projects')
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- expected_length=2,
- resource_url=collection_url)
- ud_entity = self.build_role_assignment_entity(
- domain_id=domain['id'], user_id=user1['id'],
- role_id=role_list[3]['id'], inherited_to_projects=True)
- gd_entity = self.build_role_assignment_entity(
- domain_id=domain['id'], group_id=group1['id'],
- role_id=role_list[4]['id'], inherited_to_projects=True)
- self.assertRoleAssignmentInListResponse(r, ud_entity)
- self.assertRoleAssignmentInListResponse(r, gd_entity)
-
- def _setup_hierarchical_projects_scenario(self):
- """Creates basic hierarchical projects scenario.
-
- This basic scenario contains a root with one leaf project and
- two roles with the following names: non-inherited and inherited.
-
- """
- # Create project hierarchy
- root = unit.new_project_ref(domain_id=self.domain['id'])
- leaf = unit.new_project_ref(domain_id=self.domain['id'],
- parent_id=root['id'])
-
- self.resource_api.create_project(root['id'], root)
- self.resource_api.create_project(leaf['id'], leaf)
-
- # Create 'non-inherited' and 'inherited' roles
- non_inherited_role = unit.new_role_ref(name='non-inherited')
- self.role_api.create_role(non_inherited_role['id'], non_inherited_role)
- inherited_role = unit.new_role_ref(name='inherited')
- self.role_api.create_role(inherited_role['id'], inherited_role)
-
- return (root['id'], leaf['id'],
- non_inherited_role['id'], inherited_role['id'])
-
- def test_get_token_from_inherited_user_project_role_grants(self):
- # Create default scenario
- root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
- self._setup_hierarchical_projects_scenario())
-
- # Define root and leaf projects authentication data
- root_project_auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=root_id)
- leaf_project_auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=leaf_id)
-
- # Check the user cannot get a token on root nor leaf project
- self.v3_create_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_create_token(leaf_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- # Grant non-inherited role for user on leaf project
- non_inher_up_link = self.build_role_assignment_link(
- project_id=leaf_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.put(non_inher_up_link)
-
- # Check the user can only get a token on leaf project
- self.v3_create_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_create_token(leaf_project_auth_data)
-
- # Grant inherited role for user on root project
- inher_up_link = self.build_role_assignment_link(
- project_id=root_id, user_id=self.user['id'],
- role_id=inherited_role_id, inherited_to_projects=True)
- self.put(inher_up_link)
-
- # Check the user still can get a token only on leaf project
- self.v3_create_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_create_token(leaf_project_auth_data)
-
- # Delete non-inherited grant
- self.delete(non_inher_up_link)
-
- # Check the inherited role still applies for leaf project
- self.v3_create_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_create_token(leaf_project_auth_data)
-
- # Delete inherited grant
- self.delete(inher_up_link)
-
- # Check the user cannot get a token on leaf project anymore
- self.v3_create_token(leaf_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_get_token_from_inherited_group_project_role_grants(self):
- # Create default scenario
- root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
- self._setup_hierarchical_projects_scenario())
-
- # Create group and add user to it
- group = unit.new_group_ref(domain_id=self.domain['id'])
- group = self.identity_api.create_group(group)
- self.identity_api.add_user_to_group(self.user['id'], group['id'])
-
- # Define root and leaf projects authentication data
- root_project_auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=root_id)
- leaf_project_auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=leaf_id)
-
- # Check the user cannot get a token on root nor leaf project
- self.v3_create_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_create_token(leaf_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- # Grant non-inherited role for group on leaf project
- non_inher_gp_link = self.build_role_assignment_link(
- project_id=leaf_id, group_id=group['id'],
- role_id=non_inherited_role_id)
- self.put(non_inher_gp_link)
-
- # Check the user can only get a token on leaf project
- self.v3_create_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_create_token(leaf_project_auth_data)
-
- # Grant inherited role for group on root project
- inher_gp_link = self.build_role_assignment_link(
- project_id=root_id, group_id=group['id'],
- role_id=inherited_role_id, inherited_to_projects=True)
- self.put(inher_gp_link)
-
- # Check the user still can get a token only on leaf project
- self.v3_create_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_create_token(leaf_project_auth_data)
-
- # Delete no-inherited grant
- self.delete(non_inher_gp_link)
-
- # Check the inherited role still applies for leaf project
- self.v3_create_token(leaf_project_auth_data)
-
- # Delete inherited grant
- self.delete(inher_gp_link)
-
- # Check the user cannot get a token on leaf project anymore
- self.v3_create_token(leaf_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_get_role_assignments_for_project_hierarchy(self):
- """Call ``GET /role_assignments``.
-
- Test Plan:
-
- - Create 2 roles
- - Create a hierarchy of projects with one root and one leaf project
- - Issue the URL to add a non-inherited user role to the root project
- - Issue the URL to add an inherited user role to the root project
- - Issue the URL to get all role assignments - this should return just
- 2 roles (non-inherited and inherited) in the root project.
-
- """
- # Create default scenario
- root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
- self._setup_hierarchical_projects_scenario())
-
- # Grant non-inherited role
- non_inher_up_entity = self.build_role_assignment_entity(
- project_id=root_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.put(non_inher_up_entity['links']['assignment'])
-
- # Grant inherited role
- inher_up_entity = self.build_role_assignment_entity(
- project_id=root_id, user_id=self.user['id'],
- role_id=inherited_role_id, inherited_to_projects=True)
- self.put(inher_up_entity['links']['assignment'])
-
- # Get role assignments
- collection_url = '/role_assignments'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- resource_url=collection_url)
-
- # Assert that the user has non-inherited role on root project
- self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
-
- # Assert that the user has inherited role on root project
- self.assertRoleAssignmentInListResponse(r, inher_up_entity)
-
- # Assert that the user does not have non-inherited role on leaf project
- non_inher_up_entity = self.build_role_assignment_entity(
- project_id=leaf_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
-
- # Assert that the user does not have inherited role on leaf project
- inher_up_entity['scope']['project']['id'] = leaf_id
- self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
-
- def test_get_effective_role_assignments_for_project_hierarchy(self):
- """Call ``GET /role_assignments?effective``.
-
- Test Plan:
-
- - Create 2 roles
- - Create a hierarchy of projects with one root and one leaf project
- - Issue the URL to add a non-inherited user role to the root project
- - Issue the URL to add an inherited user role to the root project
- - Issue the URL to get effective role assignments - this should return
- 1 role (non-inherited) on the root project and 1 role (inherited) on
- the leaf project.
-
- """
- # Create default scenario
- root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
- self._setup_hierarchical_projects_scenario())
-
- # Grant non-inherited role
- non_inher_up_entity = self.build_role_assignment_entity(
- project_id=root_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.put(non_inher_up_entity['links']['assignment'])
-
- # Grant inherited role
- inher_up_entity = self.build_role_assignment_entity(
- project_id=root_id, user_id=self.user['id'],
- role_id=inherited_role_id, inherited_to_projects=True)
- self.put(inher_up_entity['links']['assignment'])
-
- # Get effective role assignments
- collection_url = '/role_assignments?effective'
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- resource_url=collection_url)
-
- # Assert that the user has non-inherited role on root project
- self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
-
- # Assert that the user does not have inherited role on root project
- self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
-
- # Assert that the user does not have non-inherited role on leaf project
- non_inher_up_entity = self.build_role_assignment_entity(
- project_id=leaf_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
-
- # Assert that the user has inherited role on leaf project
- inher_up_entity['scope']['project']['id'] = leaf_id
- self.assertRoleAssignmentInListResponse(r, inher_up_entity)
-
- def test_project_id_specified_if_include_subtree_specified(self):
- """When using include_subtree, you must specify a project ID."""
- self.get('/role_assignments?include_subtree=True',
- expected_status=http_client.BAD_REQUEST)
- self.get('/role_assignments?scope.project.id&'
- 'include_subtree=True',
- expected_status=http_client.BAD_REQUEST)
-
- def test_get_role_assignments_for_project_tree(self):
- """Get role_assignment?scope.project.id=X?include_subtree``.
-
- Test Plan:
-
- - Create 2 roles and a hierarchy of projects with one root and one leaf
- - Issue the URL to add a non-inherited user role to the root project
- and the leaf project
- - Issue the URL to get role assignments for the root project but
- not the subtree - this should return just the root assignment
- - Issue the URL to get role assignments for the root project and
- it's subtree - this should return both assignments
- - Check that explicitly setting include_subtree to False is the
- equivalent to not including it at all in the query.
-
- """
- # Create default scenario
- root_id, leaf_id, non_inherited_role_id, unused_role_id = (
- self._setup_hierarchical_projects_scenario())
-
- # Grant non-inherited role to root and leaf projects
- non_inher_entity_root = self.build_role_assignment_entity(
- project_id=root_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.put(non_inher_entity_root['links']['assignment'])
- non_inher_entity_leaf = self.build_role_assignment_entity(
- project_id=leaf_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.put(non_inher_entity_leaf['links']['assignment'])
-
- # Without the subtree, we should get the one assignment on the
- # root project
- collection_url = (
- '/role_assignments?scope.project.id=%(project)s' % {
- 'project': root_id})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r, resource_url=collection_url)
-
- self.assertThat(r.result['role_assignments'], matchers.HasLength(1))
- self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
-
- # With the subtree, we should get both assignments
- collection_url = (
- '/role_assignments?scope.project.id=%(project)s'
- '&include_subtree=True' % {
- 'project': root_id})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r, resource_url=collection_url)
-
- self.assertThat(r.result['role_assignments'], matchers.HasLength(2))
- self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
- self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf)
-
- # With subtree=0, we should also only get the one assignment on the
- # root project
- collection_url = (
- '/role_assignments?scope.project.id=%(project)s'
- '&include_subtree=0' % {
- 'project': root_id})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r, resource_url=collection_url)
-
- self.assertThat(r.result['role_assignments'], matchers.HasLength(1))
- self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
-
- def test_get_effective_role_assignments_for_project_tree(self):
- """Get role_assignment ?project_id=X?include_subtree=True?effective``.
-
- Test Plan:
-
- - Create 2 roles and a hierarchy of projects with one root and 4 levels
- of child project
- - Issue the URL to add a non-inherited user role to the root project
- and a level 1 project
- - Issue the URL to add an inherited user role on the level 2 project
- - Issue the URL to get effective role assignments for the level 1
- project and it's subtree - this should return a role (non-inherited)
- on the level 1 project and roles (inherited) on each of the level
- 2, 3 and 4 projects
-
- """
- # Create default scenario
- root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
- self._setup_hierarchical_projects_scenario())
-
- # Add some extra projects to the project hierarchy
- level2 = unit.new_project_ref(domain_id=self.domain['id'],
- parent_id=leaf_id)
- level3 = unit.new_project_ref(domain_id=self.domain['id'],
- parent_id=level2['id'])
- level4 = unit.new_project_ref(domain_id=self.domain['id'],
- parent_id=level3['id'])
- self.resource_api.create_project(level2['id'], level2)
- self.resource_api.create_project(level3['id'], level3)
- self.resource_api.create_project(level4['id'], level4)
-
- # Grant non-inherited role to root (as a spoiler) and to
- # the level 1 (leaf) project
- non_inher_entity_root = self.build_role_assignment_entity(
- project_id=root_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.put(non_inher_entity_root['links']['assignment'])
- non_inher_entity_leaf = self.build_role_assignment_entity(
- project_id=leaf_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.put(non_inher_entity_leaf['links']['assignment'])
-
- # Grant inherited role to level 2
- inher_entity = self.build_role_assignment_entity(
- project_id=level2['id'], user_id=self.user['id'],
- role_id=inherited_role_id, inherited_to_projects=True)
- self.put(inher_entity['links']['assignment'])
-
- # Get effective role assignments
- collection_url = (
- '/role_assignments?scope.project.id=%(project)s'
- '&include_subtree=True&effective' % {
- 'project': leaf_id})
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(
- r, resource_url=collection_url)
-
- # There should be three assignments returned in total
- self.assertThat(r.result['role_assignments'], matchers.HasLength(3))
-
- # Assert that the user does not non-inherited role on root project
- self.assertRoleAssignmentNotInListResponse(r, non_inher_entity_root)
-
- # Assert that the user does have non-inherited role on leaf project
- self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf)
-
- # Assert that the user has inherited role on levels 3 and 4
- inher_entity['scope']['project']['id'] = level3['id']
- self.assertRoleAssignmentInListResponse(r, inher_entity)
- inher_entity['scope']['project']['id'] = level4['id']
- self.assertRoleAssignmentInListResponse(r, inher_entity)
-
- def test_get_inherited_role_assignments_for_project_hierarchy(self):
- """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
-
- Test Plan:
-
- - Create 2 roles
- - Create a hierarchy of projects with one root and one leaf project
- - Issue the URL to add a non-inherited user role to the root project
- - Issue the URL to add an inherited user role to the root project
- - Issue the URL to filter inherited to projects role assignments - this
- should return 1 role (inherited) on the root project.
-
- """
- # Create default scenario
- root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
- self._setup_hierarchical_projects_scenario())
-
- # Grant non-inherited role
- non_inher_up_entity = self.build_role_assignment_entity(
- project_id=root_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.put(non_inher_up_entity['links']['assignment'])
-
- # Grant inherited role
- inher_up_entity = self.build_role_assignment_entity(
- project_id=root_id, user_id=self.user['id'],
- role_id=inherited_role_id, inherited_to_projects=True)
- self.put(inher_up_entity['links']['assignment'])
-
- # Get inherited role assignments
- collection_url = ('/role_assignments'
- '?scope.OS-INHERIT:inherited_to=projects')
- r = self.get(collection_url)
- self.assertValidRoleAssignmentListResponse(r,
- resource_url=collection_url)
-
- # Assert that the user does not have non-inherited role on root project
- self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
-
- # Assert that the user has inherited role on root project
- self.assertRoleAssignmentInListResponse(r, inher_up_entity)
-
- # Assert that the user does not have non-inherited role on leaf project
- non_inher_up_entity = self.build_role_assignment_entity(
- project_id=leaf_id, user_id=self.user['id'],
- role_id=non_inherited_role_id)
- self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
-
- # Assert that the user does not have inherited role on leaf project
- inher_up_entity['scope']['project']['id'] = leaf_id
- self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
-
-
-class AssignmentInheritanceDisabledTestCase(test_v3.RestfulTestCase):
- """Test inheritance crud and its effects."""
-
- def config_overrides(self):
- super(AssignmentInheritanceDisabledTestCase, self).config_overrides()
- self.config_fixture.config(group='os_inherit', enabled=False)
-
- def test_crud_inherited_role_grants_failed_if_disabled(self):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
-
- base_collection_url = (
- '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': self.domain_id,
- 'user_id': self.user['id']})
- member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
- 'collection_url': base_collection_url,
- 'role_id': role['id']}
- collection_url = base_collection_url + '/inherited_to_projects'
-
- self.put(member_url, expected_status=http_client.NOT_FOUND)
- self.head(member_url, expected_status=http_client.NOT_FOUND)
- self.get(collection_url, expected_status=http_client.NOT_FOUND)
- self.delete(member_url, expected_status=http_client.NOT_FOUND)
-
-
-class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin,
- unit.TestCase):
- def _create_role(self):
- """Call ``POST /roles``."""
- ref = unit.new_role_ref()
- r = self.post('/roles', body={'role': ref})
- return self.assertValidRoleResponse(r, ref)
-
- def test_list_implied_roles_none(self):
- self.prior = self._create_role()
- url = '/roles/%s/implies' % (self.prior['id'])
- response = self.get(url).json["role_inference"]
- self.assertEqual(self.prior['id'], response['prior_role']['id'])
- self.assertEqual(0, len(response['implies']))
-
- def _create_implied_role(self, prior, implied):
- self.put('/roles/%s/implies/%s' % (prior['id'], implied['id']),
- expected_status=http_client.CREATED)
-
- def _delete_implied_role(self, prior, implied):
- self.delete('/roles/%s/implies/%s' % (prior['id'], implied['id']))
-
- def _setup_prior_two_implied(self):
- self.prior = self._create_role()
- self.implied1 = self._create_role()
- self._create_implied_role(self.prior, self.implied1)
- self.implied2 = self._create_role()
- self._create_implied_role(self.prior, self.implied2)
-
- def _assert_expected_implied_role_response(
- self, expected_prior_id, expected_implied_ids):
- r = self.get('/roles/%s/implies' % expected_prior_id)
- response = r.json["role_inference"]
- self.assertEqual(expected_prior_id, response['prior_role']['id'])
-
- actual_implied_ids = [implied['id'] for implied in response['implies']]
-
- for expected_id in expected_implied_ids:
- self.assertIn(expected_id, actual_implied_ids)
- self.assertEqual(len(expected_implied_ids), len(response['implies']))
-
- self.assertIsNotNone(response['prior_role']['links']['self'])
- for implied in response['implies']:
- self.assertIsNotNone(implied['links']['self'])
-
- def _assert_two_roles_implied(self):
- self._assert_expected_implied_role_response(
- self.prior['id'], [self.implied1['id'], self.implied2['id']])
-
- def _assert_one_role_implied(self):
- self._assert_expected_implied_role_response(
- self.prior['id'], [self.implied1['id']])
-
- self.get('/roles/%s/implies/%s' %
- (self.prior['id'], self.implied2['id']),
- expected_status=http_client.NOT_FOUND)
-
- def _assert_two_rules_defined(self):
- r = self.get('/role_inferences/')
-
- rules = r.result['role_inferences']
-
- self.assertEqual(self.prior['id'], rules[0]['prior_role']['id'])
- self.assertEqual(2, len(rules[0]['implies']))
- implied_ids = [implied['id'] for implied in rules[0]['implies']]
- implied_names = [implied['name'] for implied in rules[0]['implies']]
-
- self.assertIn(self.implied1['id'], implied_ids)
- self.assertIn(self.implied2['id'], implied_ids)
- self.assertIn(self.implied1['name'], implied_names)
- self.assertIn(self.implied2['name'], implied_names)
-
- def _assert_one_rule_defined(self):
- r = self.get('/role_inferences/')
- rules = r.result['role_inferences']
- self.assertEqual(self.prior['id'], rules[0]['prior_role']['id'])
- self.assertEqual(self.implied1['id'], rules[0]['implies'][0]['id'])
- self.assertEqual(self.implied1['name'], rules[0]['implies'][0]['name'])
- self.assertEqual(1, len(rules[0]['implies']))
-
- def test_list_all_rules(self):
- self._setup_prior_two_implied()
- self._assert_two_rules_defined()
-
- self._delete_implied_role(self.prior, self.implied2)
- self._assert_one_rule_defined()
-
- def test_CRD_implied_roles(self):
-
- self._setup_prior_two_implied()
- self._assert_two_roles_implied()
-
- self._delete_implied_role(self.prior, self.implied2)
- self._assert_one_role_implied()
-
- def _create_three_roles(self):
- self.role_list = []
- for _ in range(3):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- self.role_list.append(role)
-
- def _create_test_domain_user_project(self):
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user = unit.create_user(self.identity_api, domain_id=domain['id'])
- project = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project['id'], project)
- return domain, user, project
-
- def _assign_top_role_to_user_on_project(self, user, project):
- self.assignment_api.add_role_to_user_and_project(
- user['id'], project['id'], self.role_list[0]['id'])
-
- def _build_effective_role_assignments_url(self, user):
- return '/role_assignments?effective&user.id=%(user_id)s' % {
- 'user_id': user['id']}
-
- def _assert_all_roles_in_assignment(self, response, user):
- # Now use the list role assignments api to check that all three roles
- # appear in the collection
- self.assertValidRoleAssignmentListResponse(
- response,
- expected_length=len(self.role_list),
- resource_url=self._build_effective_role_assignments_url(user))
-
- def _assert_initial_assignment_in_effective(self, response, user, project):
- # The initial assignment should be there (the link url will be
- # generated and checked automatically since it matches the assignment)
- entity = self.build_role_assignment_entity(
- project_id=project['id'],
- user_id=user['id'], role_id=self.role_list[0]['id'])
- self.assertRoleAssignmentInListResponse(response, entity)
-
- def _assert_effective_role_for_implied_has_prior_in_links(
- self, response, user, project, prior_index, implied_index):
- # An effective role for an implied role will have the prior role
- # assignment in the links
- prior_link = '/prior_roles/%(prior)s/implies/%(implied)s' % {
- 'prior': self.role_list[prior_index]['id'],
- 'implied': self.role_list[implied_index]['id']}
- link = self.build_role_assignment_link(
- project_id=project['id'], user_id=user['id'],
- role_id=self.role_list[prior_index]['id'])
- entity = self.build_role_assignment_entity(
- link=link, project_id=project['id'],
- user_id=user['id'], role_id=self.role_list[implied_index]['id'],
- prior_link=prior_link)
- self.assertRoleAssignmentInListResponse(response, entity)
-
- def test_list_role_assignments_with_implied_roles(self):
- """Call ``GET /role_assignments`` with implied role grant.
-
- Test Plan:
-
- - Create a domain with a user and a project
- - Create 3 roles
- - Role 0 implies role 1 and role 1 implies role 2
- - Assign the top role to the project
- - Issue the URL to check effective roles on project - this
- should return all 3 roles.
- - Check the links of the 3 roles indicate the prior role where
- appropriate
-
- """
- (domain, user, project) = self._create_test_domain_user_project()
- self._create_three_roles()
- self._create_implied_role(self.role_list[0], self.role_list[1])
- self._create_implied_role(self.role_list[1], self.role_list[2])
- self._assign_top_role_to_user_on_project(user, project)
-
- response = self.get(self._build_effective_role_assignments_url(user))
- r = response
-
- self._assert_all_roles_in_assignment(r, user)
- self._assert_initial_assignment_in_effective(response, user, project)
- self._assert_effective_role_for_implied_has_prior_in_links(
- response, user, project, 0, 1)
- self._assert_effective_role_for_implied_has_prior_in_links(
- response, user, project, 1, 2)
-
- def _create_named_role(self, name):
- role = unit.new_role_ref()
- role['name'] = name
- self.role_api.create_role(role['id'], role)
- return role
-
- def test_root_role_as_implied_role_forbidden(self):
- """Test root role is forbidden to be set as an implied role.
-
- Create 2 roles that are prohibited from being an implied role.
- Create 1 additional role which should be accepted as an implied
- role. Assure the prohibited role names cannot be set as an implied
- role. Assure the accepted role name which is not a member of the
- prohibited implied role list can be successfully set an implied
- role.
- """
- prohibited_name1 = 'root1'
- prohibited_name2 = 'root2'
- accepted_name1 = 'implied1'
-
- prohibited_names = [prohibited_name1, prohibited_name2]
- self.config_fixture.config(group='assignment',
- prohibited_implied_role=prohibited_names)
-
- prior_role = self._create_role()
-
- prohibited_role1 = self._create_named_role(prohibited_name1)
- url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
- prior_role_id=prior_role['id'],
- implied_role_id=prohibited_role1['id'])
- self.put(url, expected_status=http_client.FORBIDDEN)
-
- prohibited_role2 = self._create_named_role(prohibited_name2)
- url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
- prior_role_id=prior_role['id'],
- implied_role_id=prohibited_role2['id'])
- self.put(url, expected_status=http_client.FORBIDDEN)
-
- accepted_role1 = self._create_named_role(accepted_name1)
- url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
- prior_role_id=prior_role['id'],
- implied_role_id=accepted_role1['id'])
- self.put(url, expected_status=http_client.CREATED)
-
- def test_trusts_from_implied_role(self):
- self._create_three_roles()
- self._create_implied_role(self.role_list[0], self.role_list[1])
- self._create_implied_role(self.role_list[1], self.role_list[2])
- self._assign_top_role_to_user_on_project(self.user, self.project)
-
- # Create a trustee and assign the prior role to her
- trustee = unit.create_user(self.identity_api, domain_id=self.domain_id)
- ref = unit.new_trust_ref(
- trustor_user_id=self.user['id'],
- trustee_user_id=trustee['id'],
- project_id=self.project['id'],
- role_ids=[self.role_list[0]['id']])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = r.result['trust']
-
- # Only the role that was specified is in the trust, NOT implied roles
- self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id'])
- self.assertThat(trust['roles'], matchers.HasLength(1))
-
- # Authenticate as the trustee
- auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password'],
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data)
- token = r.result['token']
- self.assertThat(token['roles'],
- matchers.HasLength(len(self.role_list)))
- for role in token['roles']:
- self.assertIn(role, self.role_list)
- for role in self.role_list:
- self.assertIn(role, token['roles'])
-
- def test_trusts_from_domain_specific_implied_role(self):
- self._create_three_roles()
- # Overwrite the first role with a domain specific role
- role = unit.new_role_ref(domain_id=self.domain_id)
- self.role_list[0] = self.role_api.create_role(role['id'], role)
- self._create_implied_role(self.role_list[0], self.role_list[1])
- self._create_implied_role(self.role_list[1], self.role_list[2])
- self._assign_top_role_to_user_on_project(self.user, self.project)
-
- # Create a trustee and assign the prior role to her
- trustee = unit.create_user(self.identity_api, domain_id=self.domain_id)
- ref = unit.new_trust_ref(
- trustor_user_id=self.user['id'],
- trustee_user_id=trustee['id'],
- project_id=self.project['id'],
- role_ids=[self.role_list[0]['id']])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = r.result['trust']
-
- # Only the role that was specified is in the trust, NOT implied roles
- self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id'])
- self.assertThat(trust['roles'], matchers.HasLength(1))
-
- # Authenticate as the trustee
- auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password'],
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data)
- token = r.result['token']
-
- # The token should have the roles implies by the domain specific role,
- # but not the domain specific role itself.
- self.assertThat(token['roles'],
- matchers.HasLength(len(self.role_list) - 1))
- for role in token['roles']:
- self.assertIn(role, self.role_list)
- for role in [self.role_list[1], self.role_list[2]]:
- self.assertIn(role, token['roles'])
- self.assertNotIn(self.role_list[0], token['roles'])
-
-
-class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase):
- def setUp(self):
- def create_role(domain_id=None):
- """Call ``POST /roles``."""
- ref = unit.new_role_ref(domain_id=domain_id)
- r = self.post(
- '/roles',
- body={'role': ref})
- return self.assertValidRoleResponse(r, ref)
-
- super(DomainSpecificRoleTests, self).setUp()
- self.domainA = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.domainB = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainB['id'], self.domainB)
-
- self.global_role1 = create_role()
- self.global_role2 = create_role()
- # Since there maybe other global roles already created, let's count
- # them, so we can ensure we can check subsequent list responses
- # are correct
- r = self.get('/roles')
- self.existing_global_roles = len(r.result['roles'])
-
- # And now create some domain specific roles
- self.domainA_role1 = create_role(domain_id=self.domainA['id'])
- self.domainA_role2 = create_role(domain_id=self.domainA['id'])
- self.domainB_role = create_role(domain_id=self.domainB['id'])
-
- def test_get_and_list_domain_specific_roles(self):
- # Check we can get a domain specific role
- r = self.get('/roles/%s' % self.domainA_role1['id'])
- self.assertValidRoleResponse(r, self.domainA_role1)
-
- # If we list without specifying a domain, we should only get global
- # roles back.
- r = self.get('/roles')
- self.assertValidRoleListResponse(
- r, expected_length=self.existing_global_roles)
- self.assertRoleInListResponse(r, self.global_role1)
- self.assertRoleInListResponse(r, self.global_role2)
- self.assertRoleNotInListResponse(r, self.domainA_role1)
- self.assertRoleNotInListResponse(r, self.domainA_role2)
- self.assertRoleNotInListResponse(r, self.domainB_role)
-
- # Now list those in domainA, making sure that's all we get back
- r = self.get('/roles?domain_id=%s' % self.domainA['id'])
- self.assertValidRoleListResponse(r, expected_length=2)
- self.assertRoleInListResponse(r, self.domainA_role1)
- self.assertRoleInListResponse(r, self.domainA_role2)
-
- def test_update_domain_specific_roles(self):
- self.domainA_role1['name'] = uuid.uuid4().hex
- self.patch('/roles/%(role_id)s' % {
- 'role_id': self.domainA_role1['id']},
- body={'role': self.domainA_role1})
- r = self.get('/roles/%s' % self.domainA_role1['id'])
- self.assertValidRoleResponse(r, self.domainA_role1)
-
- def test_delete_domain_specific_roles(self):
- # Check delete only removes that one domain role
- self.delete('/roles/%(role_id)s' % {
- 'role_id': self.domainA_role1['id']})
-
- self.get('/roles/%s' % self.domainA_role1['id'],
- expected_status=http_client.NOT_FOUND)
- # Now re-list those in domainA, making sure there's only one left
- r = self.get('/roles?domain_id=%s' % self.domainA['id'])
- self.assertValidRoleListResponse(r, expected_length=1)
- self.assertRoleInListResponse(r, self.domainA_role2)
-
-
-class ListUserProjectsTestCase(test_v3.RestfulTestCase):
- """Tests for /users/<user>/projects"""
-
- def load_sample_data(self):
- # do not load base class's data, keep it focused on the tests
-
- self.auths = []
- self.domains = []
- self.projects = []
- self.roles = []
- self.users = []
-
- # Create 3 sets of domain, roles, projects, and users to demonstrate
- # the right user's data is loaded and only projects they can access
- # are returned.
-
- for _ in range(3):
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
-
- user = unit.create_user(self.identity_api, domain_id=domain['id'])
-
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
-
- self.assignment_api.create_grant(role['id'],
- user_id=user['id'],
- domain_id=domain['id'])
-
- project = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project['id'], project)
-
- self.assignment_api.create_grant(role['id'],
- user_id=user['id'],
- project_id=project['id'])
-
- auth = self.build_authentication_request(
- user_id=user['id'],
- password=user['password'],
- domain_id=domain['id'])
-
- self.auths.append(auth)
- self.domains.append(domain)
- self.projects.append(project)
- self.roles.append(role)
- self.users.append(user)
-
- def test_list_all(self):
- for i in range(len(self.users)):
- user = self.users[i]
- auth = self.auths[i]
-
- url = '/users/%s/projects' % user['id']
- result = self.get(url, auth=auth)
- projects_result = result.json['projects']
- self.assertEqual(1, len(projects_result))
- self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
-
- def test_list_enabled(self):
- for i in range(len(self.users)):
- user = self.users[i]
- auth = self.auths[i]
-
- # There are no disabled projects
- url = '/users/%s/projects?enabled=True' % user['id']
- result = self.get(url, auth=auth)
- projects_result = result.json['projects']
- self.assertEqual(1, len(projects_result))
- self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
-
- def test_list_disabled(self):
- for i in range(len(self.users)):
- user = self.users[i]
- auth = self.auths[i]
- project = self.projects[i]
-
- # There are no disabled projects
- url = '/users/%s/projects?enabled=False' % user['id']
- result = self.get(url, auth=auth)
- self.assertEqual(0, len(result.json['projects']))
-
- # disable this one and check again
- project['enabled'] = False
- self.resource_api.update_project(project['id'], project)
- result = self.get(url, auth=auth)
- projects_result = result.json['projects']
- self.assertEqual(1, len(projects_result))
- self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
-
- def test_list_by_domain_id(self):
- for i in range(len(self.users)):
- user = self.users[i]
- domain = self.domains[i]
- auth = self.auths[i]
-
- # Try looking for projects with a non-existent domain_id
- url = '/users/%s/projects?domain_id=%s' % (user['id'],
- uuid.uuid4().hex)
- result = self.get(url, auth=auth)
- self.assertEqual(0, len(result.json['projects']))
-
- # Now try a valid one
- url = '/users/%s/projects?domain_id=%s' % (user['id'],
- domain['id'])
- result = self.get(url, auth=auth)
- projects_result = result.json['projects']
- self.assertEqual(1, len(projects_result))
- self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
diff --git a/keystone-moon/keystone/tests/unit/test_v3_auth.py b/keystone-moon/keystone/tests/unit/test_v3_auth.py
deleted file mode 100644
index 698feeb8..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_auth.py
+++ /dev/null
@@ -1,4955 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import datetime
-import itertools
-import json
-import operator
-import uuid
-
-from keystoneclient.common import cms
-import mock
-from oslo_config import cfg
-from oslo_log import versionutils
-from oslo_utils import fixture
-from oslo_utils import timeutils
-from six.moves import http_client
-from six.moves import range
-from testtools import matchers
-from testtools import testcase
-
-from keystone import auth
-from keystone.auth.plugins import totp
-from keystone.common import utils
-from keystone.contrib.revoke import routers
-from keystone import exception
-from keystone.policy.backends import rules
-from keystone.tests.common import auth as common_auth
-from keystone.tests import unit
-from keystone.tests.unit import ksfixtures
-from keystone.tests.unit import test_v3
-
-CONF = cfg.CONF
-
-
-class TestAuthInfo(common_auth.AuthTestMixin, testcase.TestCase):
- def setUp(self):
- super(TestAuthInfo, self).setUp()
- auth.controllers.load_auth_methods()
-
- def test_missing_auth_methods(self):
- auth_data = {'identity': {}}
- auth_data['identity']['token'] = {'id': uuid.uuid4().hex}
- self.assertRaises(exception.ValidationError,
- auth.controllers.AuthInfo.create,
- None,
- auth_data)
-
- def test_unsupported_auth_method(self):
- auth_data = {'methods': ['abc']}
- auth_data['abc'] = {'test': 'test'}
- auth_data = {'identity': auth_data}
- self.assertRaises(exception.AuthMethodNotSupported,
- auth.controllers.AuthInfo.create,
- None,
- auth_data)
-
- def test_missing_auth_method_data(self):
- auth_data = {'methods': ['password']}
- auth_data = {'identity': auth_data}
- self.assertRaises(exception.ValidationError,
- auth.controllers.AuthInfo.create,
- None,
- auth_data)
-
- def test_project_name_no_domain(self):
- auth_data = self.build_authentication_request(
- username='test',
- password='test',
- project_name='abc')['auth']
- self.assertRaises(exception.ValidationError,
- auth.controllers.AuthInfo.create,
- None,
- auth_data)
-
- def test_both_project_and_domain_in_scope(self):
- auth_data = self.build_authentication_request(
- user_id='test',
- password='test',
- project_name='test',
- domain_name='test')['auth']
- self.assertRaises(exception.ValidationError,
- auth.controllers.AuthInfo.create,
- None,
- auth_data)
-
- def test_get_method_names_duplicates(self):
- auth_data = self.build_authentication_request(
- token='test',
- user_id='test',
- password='test')['auth']
- auth_data['identity']['methods'] = ['password', 'token',
- 'password', 'password']
- context = None
- auth_info = auth.controllers.AuthInfo.create(context, auth_data)
- self.assertEqual(['password', 'token'],
- auth_info.get_method_names())
-
- def test_get_method_data_invalid_method(self):
- auth_data = self.build_authentication_request(
- user_id='test',
- password='test')['auth']
- context = None
- auth_info = auth.controllers.AuthInfo.create(context, auth_data)
-
- method_name = uuid.uuid4().hex
- self.assertRaises(exception.ValidationError,
- auth_info.get_method_data,
- method_name)
-
-
-class TokenAPITests(object):
- # Why is this not just setUp? Because TokenAPITests is not a test class
- # itself. If TokenAPITests became a subclass of the testcase, it would get
- # called by the enumerate-tests-in-file code. The way the functions get
- # resolved in Python for multiple inheritance means that a setUp in this
- # would get skipped by the testrunner.
- def doSetUp(self):
- r = self.v3_create_token(self.build_authentication_request(
- username=self.user['name'],
- user_domain_id=self.domain_id,
- password=self.user['password']))
- self.v3_token_data = r.result
- self.v3_token = r.headers.get('X-Subject-Token')
- self.headers = {'X-Subject-Token': r.headers.get('X-Subject-Token')}
-
- def _make_auth_request(self, auth_data):
- resp = self.post('/auth/tokens', body=auth_data)
- token = resp.headers.get('X-Subject-Token')
- return token
-
- def _get_unscoped_token(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- return self._make_auth_request(auth_data)
-
- def _get_domain_scoped_token(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain_id)
- return self._make_auth_request(auth_data)
-
- def _get_project_scoped_token(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project_id)
- return self._make_auth_request(auth_data)
-
- def _get_trust_scoped_token(self, trustee_user, trust):
- auth_data = self.build_authentication_request(
- user_id=trustee_user['id'],
- password=trustee_user['password'],
- trust_id=trust['id'])
- return self._make_auth_request(auth_data)
-
- def _create_trust(self, impersonation=False):
- # Create a trustee user
- trustee_user = unit.create_user(self.identity_api,
- domain_id=self.domain_id)
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=trustee_user['id'],
- project_id=self.project_id,
- impersonation=impersonation,
- role_ids=[self.role_id])
-
- # Create a trust
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
- return (trustee_user, trust)
-
- def _validate_token(self, token, expected_status=http_client.OK):
- return self.get(
- '/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=expected_status)
-
- def _revoke_token(self, token, expected_status=http_client.NO_CONTENT):
- return self.delete(
- '/auth/tokens',
- headers={'x-subject-token': token},
- expected_status=expected_status)
-
- def _set_user_enabled(self, user, enabled=True):
- user['enabled'] = enabled
- self.identity_api.update_user(user['id'], user)
-
- def test_validate_unscoped_token(self):
- unscoped_token = self._get_unscoped_token()
- self._validate_token(unscoped_token)
-
- def test_revoke_unscoped_token(self):
- unscoped_token = self._get_unscoped_token()
- self._validate_token(unscoped_token)
- self._revoke_token(unscoped_token)
- self._validate_token(unscoped_token,
- expected_status=http_client.NOT_FOUND)
-
- def test_unscoped_token_is_invalid_after_disabling_user(self):
- unscoped_token = self._get_unscoped_token()
- # Make sure the token is valid
- self._validate_token(unscoped_token)
- # Disable the user
- self._set_user_enabled(self.user, enabled=False)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
-
- def test_unscoped_token_is_invalid_after_enabling_disabled_user(self):
- unscoped_token = self._get_unscoped_token()
- # Make sure the token is valid
- self._validate_token(unscoped_token)
- # Disable the user
- self._set_user_enabled(self.user, enabled=False)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
- # Enable the user
- self._set_user_enabled(self.user)
- # Ensure validating a token for a re-enabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
-
- def test_unscoped_token_is_invalid_after_disabling_user_domain(self):
- unscoped_token = self._get_unscoped_token()
- # Make sure the token is valid
- self._validate_token(unscoped_token)
- # Disable the user's domain
- self.domain['enabled'] = False
- self.resource_api.update_domain(self.domain['id'], self.domain)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
-
- def test_unscoped_token_is_invalid_after_changing_user_password(self):
- unscoped_token = self._get_unscoped_token()
- # Make sure the token is valid
- self._validate_token(unscoped_token)
- # Change user's password
- self.user['password'] = 'Password1'
- self.identity_api.update_user(self.user['id'], self.user)
- # Ensure updating user's password revokes existing user's tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
-
- def test_validate_domain_scoped_token(self):
- # Grant user access to domain
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- domain_scoped_token = self._get_domain_scoped_token()
- resp = self._validate_token(domain_scoped_token)
- resp_json = json.loads(resp.body)
- self.assertIsNotNone(resp_json['token']['catalog'])
- self.assertIsNotNone(resp_json['token']['roles'])
- self.assertIsNotNone(resp_json['token']['domain'])
-
- def test_domain_scoped_token_is_invalid_after_disabling_user(self):
- # Grant user access to domain
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- domain_scoped_token = self._get_domain_scoped_token()
- # Make sure the token is valid
- self._validate_token(domain_scoped_token)
- # Disable user
- self._set_user_enabled(self.user, enabled=False)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- domain_scoped_token)
-
- def test_domain_scoped_token_is_invalid_after_deleting_grant(self):
- # Grant user access to domain
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- domain_scoped_token = self._get_domain_scoped_token()
- # Make sure the token is valid
- self._validate_token(domain_scoped_token)
- # Delete access to domain
- self.assignment_api.delete_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- domain_scoped_token)
-
- def test_domain_scoped_token_invalid_after_disabling_domain(self):
- # Grant user access to domain
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- domain_scoped_token = self._get_domain_scoped_token()
- # Make sure the token is valid
- self._validate_token(domain_scoped_token)
- # Disable domain
- self.domain['enabled'] = False
- self.resource_api.update_domain(self.domain['id'], self.domain)
- # Ensure validating a token for a disabled domain fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- domain_scoped_token)
-
- def test_v2_validate_domain_scoped_token_returns_unauthorized(self):
- # Test that validating a domain scoped token in v2.0 returns
- # unauthorized.
- # Grant user access to domain
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
-
- scoped_token = self._get_domain_scoped_token()
- self.assertRaises(exception.Unauthorized,
- self.token_provider_api.validate_v2_token,
- scoped_token)
-
- def test_validate_project_scoped_token(self):
- project_scoped_token = self._get_project_scoped_token()
- self._validate_token(project_scoped_token)
-
- def test_revoke_project_scoped_token(self):
- project_scoped_token = self._get_project_scoped_token()
- self._validate_token(project_scoped_token)
- self._revoke_token(project_scoped_token)
- self._validate_token(project_scoped_token,
- expected_status=http_client.NOT_FOUND)
-
- def test_project_scoped_token_is_invalid_after_disabling_user(self):
- project_scoped_token = self._get_project_scoped_token()
- # Make sure the token is valid
- self._validate_token(project_scoped_token)
- # Disable the user
- self._set_user_enabled(self.user, enabled=False)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- project_scoped_token)
-
- def test_project_scoped_token_invalid_after_changing_user_password(self):
- project_scoped_token = self._get_project_scoped_token()
- # Make sure the token is valid
- self._validate_token(project_scoped_token)
- # Update user's password
- self.user['password'] = 'Password1'
- self.identity_api.update_user(self.user['id'], self.user)
- # Ensure updating user's password revokes existing tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- project_scoped_token)
-
- def test_project_scoped_token_invalid_after_disabling_project(self):
- project_scoped_token = self._get_project_scoped_token()
- # Make sure the token is valid
- self._validate_token(project_scoped_token)
- # Disable project
- self.project['enabled'] = False
- self.resource_api.update_project(self.project['id'], self.project)
- # Ensure validating a token for a disabled project fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- project_scoped_token)
-
- def test_rescope_unscoped_token_with_trust(self):
- trustee_user, trust = self._create_trust()
- self._get_trust_scoped_token(trustee_user, trust)
-
- def test_validate_a_trust_scoped_token(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
-
- def test_validate_a_trust_scoped_token_impersonated(self):
- trustee_user, trust = self._create_trust(impersonation=True)
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
-
- def test_revoke_trust_scoped_token(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
- self._revoke_token(trust_scoped_token)
- self._validate_token(trust_scoped_token,
- expected_status=http_client.NOT_FOUND)
-
- def test_trust_scoped_token_is_invalid_after_disabling_trustee(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
-
- # Disable trustee
- trustee_update_ref = dict(enabled=False)
- self.identity_api.update_user(trustee_user['id'], trustee_update_ref)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
-
- def test_trust_scoped_token_invalid_after_changing_trustee_password(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
- # Change trustee's password
- trustee_update_ref = dict(password='Password1')
- self.identity_api.update_user(trustee_user['id'], trustee_update_ref)
- # Ensure updating trustee's password revokes existing tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
-
- def test_trust_scoped_token_is_invalid_after_disabling_trustor(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
-
- # Disable the trustor
- trustor_update_ref = dict(enabled=False)
- self.identity_api.update_user(self.user['id'], trustor_update_ref)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
-
- def test_trust_scoped_token_invalid_after_changing_trustor_password(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
-
- # Change trustor's password
- trustor_update_ref = dict(password='Password1')
- self.identity_api.update_user(self.user['id'], trustor_update_ref)
- # Ensure updating trustor's password revokes existing user's tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
-
- def test_trust_scoped_token_invalid_after_disabled_trustor_domain(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
-
- # Disable trustor's domain
- self.domain['enabled'] = False
- self.resource_api.update_domain(self.domain['id'], self.domain)
-
- trustor_update_ref = dict(password='Password1')
- self.identity_api.update_user(self.user['id'], trustor_update_ref)
- # Ensure updating trustor's password revokes existing user's tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
-
- def test_v2_validate_trust_scoped_token(self):
- # Test that validating an trust scoped token in v2.0 returns
- # unauthorized.
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- self.assertRaises(exception.Unauthorized,
- self.token_provider_api.validate_v2_token,
- trust_scoped_token)
-
- def test_default_fixture_scope_token(self):
- self.assertIsNotNone(self.get_scoped_token())
-
- def test_v3_v2_intermix_new_default_domain(self):
- # If the default_domain_id config option is changed, then should be
- # able to validate a v3 token with user in the new domain.
-
- # 1) Create a new domain for the user.
- new_domain = unit.new_domain_ref()
- self.resource_api.create_domain(new_domain['id'], new_domain)
-
- # 2) Create user in new domain.
- new_user = unit.create_user(self.identity_api,
- domain_id=new_domain['id'])
-
- # 3) Update the default_domain_id config option to the new domain
- self.config_fixture.config(
- group='identity',
- default_domain_id=new_domain['id'])
-
- # 4) Get a token using v3 API.
- v3_token = self.get_requested_token(self.build_authentication_request(
- user_id=new_user['id'],
- password=new_user['password']))
-
- # 5) Validate token using v2 API.
- self.admin_request(
- path='/v2.0/tokens/%s' % v3_token,
- token=self.get_admin_token(),
- method='GET')
-
- def test_v3_v2_intermix_domain_scoped_token_failed(self):
- # grant the domain role to user
- self.put(
- path='/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id']))
-
- # generate a domain-scoped v3 token
- v3_token = self.get_requested_token(self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain['id']))
-
- # domain-scoped tokens are not supported by v2
- self.admin_request(
- method='GET',
- path='/v2.0/tokens/%s' % v3_token,
- token=self.get_admin_token(),
- expected_status=http_client.UNAUTHORIZED)
-
- def test_v3_v2_intermix_non_default_project_succeed(self):
- # self.project is in a non-default domain
- v3_token = self.get_requested_token(self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.project['id']))
-
- # v2 cannot reference projects outside the default domain
- self.admin_request(
- method='GET',
- path='/v2.0/tokens/%s' % v3_token,
- token=self.get_admin_token())
-
- def test_v3_v2_intermix_non_default_user_succeed(self):
- self.assignment_api.create_grant(
- self.role['id'],
- user_id=self.user['id'],
- project_id=self.default_domain_project['id'])
-
- # self.user is in a non-default domain
- v3_token = self.get_requested_token(self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.default_domain_project['id']))
-
- # v2 cannot reference projects outside the default domain
- self.admin_request(
- method='GET',
- path='/v2.0/tokens/%s' % v3_token,
- token=self.get_admin_token())
-
- def test_v3_v2_intermix_domain_scope_failed(self):
- self.assignment_api.create_grant(
- self.role['id'],
- user_id=self.default_domain_user['id'],
- domain_id=self.domain['id'])
-
- v3_token = self.get_requested_token(self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- domain_id=self.domain['id']))
-
- # v2 cannot reference projects outside the default domain
- self.admin_request(
- path='/v2.0/tokens/%s' % v3_token,
- token=self.get_admin_token(),
- method='GET',
- expected_status=http_client.UNAUTHORIZED)
-
- def test_v3_v2_unscoped_token_intermix(self):
- r = self.v3_create_token(self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password']))
- self.assertValidUnscopedTokenResponse(r)
- v3_token_data = r.result
- v3_token = r.headers.get('X-Subject-Token')
-
- # now validate the v3 token with v2 API
- r = self.admin_request(
- path='/v2.0/tokens/%s' % v3_token,
- token=self.get_admin_token(),
- method='GET')
- v2_token_data = r.result
-
- self.assertEqual(v2_token_data['access']['user']['id'],
- v3_token_data['token']['user']['id'])
- # v2 token time has not fraction of second precision so
- # just need to make sure the non fraction part agrees
- self.assertIn(v2_token_data['access']['token']['expires'][:-1],
- v3_token_data['token']['expires_at'])
-
- def test_v3_v2_token_intermix(self):
- # FIXME(gyee): PKI tokens are not interchangeable because token
- # data is baked into the token itself.
- r = self.v3_create_token(self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project['id']))
- self.assertValidProjectScopedTokenResponse(r)
- v3_token_data = r.result
- v3_token = r.headers.get('X-Subject-Token')
-
- # now validate the v3 token with v2 API
- r = self.admin_request(
- method='GET',
- path='/v2.0/tokens/%s' % v3_token,
- token=self.get_admin_token())
- v2_token_data = r.result
-
- self.assertEqual(v2_token_data['access']['user']['id'],
- v3_token_data['token']['user']['id'])
- # v2 token time has not fraction of second precision so
- # just need to make sure the non fraction part agrees
- self.assertIn(v2_token_data['access']['token']['expires'][:-1],
- v3_token_data['token']['expires_at'])
- self.assertEqual(v2_token_data['access']['user']['roles'][0]['name'],
- v3_token_data['token']['roles'][0]['name'])
-
- def test_v2_v3_unscoped_token_intermix(self):
- r = self.admin_request(
- method='POST',
- path='/v2.0/tokens',
- body={
- 'auth': {
- 'passwordCredentials': {
- 'userId': self.default_domain_user['id'],
- 'password': self.default_domain_user['password']
- }
- }
- })
- v2_token_data = r.result
- v2_token = v2_token_data['access']['token']['id']
-
- r = self.get('/auth/tokens', headers={'X-Subject-Token': v2_token})
- self.assertValidUnscopedTokenResponse(r)
- v3_token_data = r.result
-
- self.assertEqual(v2_token_data['access']['user']['id'],
- v3_token_data['token']['user']['id'])
- # v2 token time has not fraction of second precision so
- # just need to make sure the non fraction part agrees
- self.assertIn(v2_token_data['access']['token']['expires'][-1],
- v3_token_data['token']['expires_at'])
-
- def test_v2_v3_token_intermix(self):
- r = self.admin_request(
- path='/v2.0/tokens',
- method='POST',
- body={
- 'auth': {
- 'passwordCredentials': {
- 'userId': self.default_domain_user['id'],
- 'password': self.default_domain_user['password']
- },
- 'tenantId': self.default_domain_project['id']
- }
- })
- v2_token_data = r.result
- v2_token = v2_token_data['access']['token']['id']
-
- r = self.get('/auth/tokens', headers={'X-Subject-Token': v2_token})
- self.assertValidProjectScopedTokenResponse(r)
- v3_token_data = r.result
-
- self.assertEqual(v2_token_data['access']['user']['id'],
- v3_token_data['token']['user']['id'])
- # v2 token time has not fraction of second precision so
- # just need to make sure the non fraction part agrees
- self.assertIn(v2_token_data['access']['token']['expires'][-1],
- v3_token_data['token']['expires_at'])
- self.assertEqual(v2_token_data['access']['user']['roles'][0]['name'],
- v3_token_data['token']['roles'][0]['name'])
-
- v2_issued_at = timeutils.parse_isotime(
- v2_token_data['access']['token']['issued_at'])
- v3_issued_at = timeutils.parse_isotime(
- v3_token_data['token']['issued_at'])
-
- self.assertEqual(v2_issued_at, v3_issued_at)
-
- def test_v2_token_deleted_on_v3(self):
- # Create a v2 token.
- body = {
- 'auth': {
- 'passwordCredentials': {
- 'userId': self.default_domain_user['id'],
- 'password': self.default_domain_user['password']
- },
- 'tenantId': self.default_domain_project['id']
- }
- }
- r = self.admin_request(
- path='/v2.0/tokens', method='POST', body=body)
- v2_token = r.result['access']['token']['id']
-
- # Delete the v2 token using v3.
- self.delete(
- '/auth/tokens', headers={'X-Subject-Token': v2_token})
-
- # Attempting to use the deleted token on v2 should fail.
- self.admin_request(
- path='/v2.0/tenants', method='GET', token=v2_token,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_rescoping_token(self):
- expires = self.v3_token_data['token']['expires_at']
-
- # rescope the token
- r = self.v3_create_token(self.build_authentication_request(
- token=self.v3_token,
- project_id=self.project_id))
- self.assertValidProjectScopedTokenResponse(r)
-
- # ensure token expiration stayed the same
- self.assertEqual(expires, r.result['token']['expires_at'])
-
- def test_check_token(self):
- self.head('/auth/tokens', headers=self.headers,
- expected_status=http_client.OK)
-
- def test_validate_token(self):
- r = self.get('/auth/tokens', headers=self.headers)
- self.assertValidUnscopedTokenResponse(r)
-
- def test_validate_missing_subject_token(self):
- self.get('/auth/tokens',
- expected_status=http_client.NOT_FOUND)
-
- def test_validate_missing_auth_token(self):
- self.admin_request(
- method='GET',
- path='/v3/projects',
- token=None,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_validate_token_nocatalog(self):
- v3_token = self.get_requested_token(self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id']))
- r = self.get(
- '/auth/tokens?nocatalog',
- headers={'X-Subject-Token': v3_token})
- self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
-
- def test_is_admin_token_by_ids(self):
- self.config_fixture.config(
- group='resource',
- admin_project_domain_name=self.domain['name'],
- admin_project_name=self.project['name'])
- r = self.v3_create_token(self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id']))
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=True)
- v3_token = r.headers.get('X-Subject-Token')
- r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=True)
-
- def test_is_admin_token_by_names(self):
- self.config_fixture.config(
- group='resource',
- admin_project_domain_name=self.domain['name'],
- admin_project_name=self.project['name'])
- r = self.v3_create_token(self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_domain_name=self.domain['name'],
- project_name=self.project['name']))
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=True)
- v3_token = r.headers.get('X-Subject-Token')
- r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=True)
-
- def test_token_for_non_admin_project_is_not_admin(self):
- self.config_fixture.config(
- group='resource',
- admin_project_domain_name=self.domain['name'],
- admin_project_name=uuid.uuid4().hex)
- r = self.v3_create_token(self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id']))
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
- v3_token = r.headers.get('X-Subject-Token')
- r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
-
- def test_token_for_non_admin_domain_same_project_name_is_not_admin(self):
- self.config_fixture.config(
- group='resource',
- admin_project_domain_name=uuid.uuid4().hex,
- admin_project_name=self.project['name'])
- r = self.v3_create_token(self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id']))
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
- v3_token = r.headers.get('X-Subject-Token')
- r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
-
- def test_only_admin_project_set_acts_as_non_admin(self):
- self.config_fixture.config(
- group='resource',
- admin_project_name=self.project['name'])
- r = self.v3_create_token(self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id']))
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
- v3_token = r.headers.get('X-Subject-Token')
- r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
- self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
-
- def _create_role(self, domain_id=None):
- """Call ``POST /roles``."""
- ref = unit.new_role_ref(domain_id=domain_id)
- r = self.post('/roles', body={'role': ref})
- return self.assertValidRoleResponse(r, ref)
-
- def _create_implied_role(self, prior_id):
- implied = self._create_role()
- url = '/roles/%s/implies/%s' % (prior_id, implied['id'])
- self.put(url, expected_status=http_client.CREATED)
- return implied
-
- def _delete_implied_role(self, prior_role_id, implied_role_id):
- url = '/roles/%s/implies/%s' % (prior_role_id, implied_role_id)
- self.delete(url)
-
- def _get_scoped_token_roles(self, is_domain=False):
- if is_domain:
- v3_token = self.get_domain_scoped_token()
- else:
- v3_token = self.get_scoped_token()
-
- r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
- v3_token_data = r.result
- token_roles = v3_token_data['token']['roles']
- return token_roles
-
- def _create_implied_role_shows_in_v3_token(self, is_domain):
- token_roles = self._get_scoped_token_roles(is_domain)
- self.assertEqual(1, len(token_roles))
-
- prior = token_roles[0]['id']
- implied1 = self._create_implied_role(prior)
-
- token_roles = self._get_scoped_token_roles(is_domain)
- self.assertEqual(2, len(token_roles))
-
- implied2 = self._create_implied_role(prior)
- token_roles = self._get_scoped_token_roles(is_domain)
- self.assertEqual(3, len(token_roles))
-
- token_role_ids = [role['id'] for role in token_roles]
- self.assertIn(prior, token_role_ids)
- self.assertIn(implied1['id'], token_role_ids)
- self.assertIn(implied2['id'], token_role_ids)
-
- def test_create_implied_role_shows_in_v3_project_token(self):
- # regardless of the default chosen, this should always
- # test with the option set.
- self.config_fixture.config(group='token', infer_roles=True)
- self._create_implied_role_shows_in_v3_token(False)
-
- def test_create_implied_role_shows_in_v3_domain_token(self):
- self.config_fixture.config(group='token', infer_roles=True)
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
-
- self._create_implied_role_shows_in_v3_token(True)
-
- def test_group_assigned_implied_role_shows_in_v3_token(self):
- self.config_fixture.config(group='token', infer_roles=True)
- is_domain = False
- token_roles = self._get_scoped_token_roles(is_domain)
- self.assertEqual(1, len(token_roles))
-
- new_role = self._create_role()
- prior = new_role['id']
-
- new_group_ref = unit.new_group_ref(domain_id=self.domain['id'])
- new_group = self.identity_api.create_group(new_group_ref)
- self.assignment_api.create_grant(prior,
- group_id=new_group['id'],
- project_id=self.project['id'])
-
- token_roles = self._get_scoped_token_roles(is_domain)
- self.assertEqual(1, len(token_roles))
-
- self.identity_api.add_user_to_group(self.user['id'],
- new_group['id'])
-
- token_roles = self._get_scoped_token_roles(is_domain)
- self.assertEqual(2, len(token_roles))
-
- implied1 = self._create_implied_role(prior)
-
- token_roles = self._get_scoped_token_roles(is_domain)
- self.assertEqual(3, len(token_roles))
-
- implied2 = self._create_implied_role(prior)
- token_roles = self._get_scoped_token_roles(is_domain)
- self.assertEqual(4, len(token_roles))
-
- token_role_ids = [role['id'] for role in token_roles]
- self.assertIn(prior, token_role_ids)
- self.assertIn(implied1['id'], token_role_ids)
- self.assertIn(implied2['id'], token_role_ids)
-
- def test_multiple_implied_roles_show_in_v3_token(self):
- self.config_fixture.config(group='token', infer_roles=True)
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(1, len(token_roles))
-
- prior = token_roles[0]['id']
- implied1 = self._create_implied_role(prior)
- implied2 = self._create_implied_role(prior)
- implied3 = self._create_implied_role(prior)
-
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(4, len(token_roles))
-
- token_role_ids = [role['id'] for role in token_roles]
- self.assertIn(prior, token_role_ids)
- self.assertIn(implied1['id'], token_role_ids)
- self.assertIn(implied2['id'], token_role_ids)
- self.assertIn(implied3['id'], token_role_ids)
-
- def test_chained_implied_role_shows_in_v3_token(self):
- self.config_fixture.config(group='token', infer_roles=True)
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(1, len(token_roles))
-
- prior = token_roles[0]['id']
- implied1 = self._create_implied_role(prior)
- implied2 = self._create_implied_role(implied1['id'])
- implied3 = self._create_implied_role(implied2['id'])
-
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(4, len(token_roles))
-
- token_role_ids = [role['id'] for role in token_roles]
-
- self.assertIn(prior, token_role_ids)
- self.assertIn(implied1['id'], token_role_ids)
- self.assertIn(implied2['id'], token_role_ids)
- self.assertIn(implied3['id'], token_role_ids)
-
- def test_implied_role_disabled_by_config(self):
- self.config_fixture.config(group='token', infer_roles=False)
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(1, len(token_roles))
-
- prior = token_roles[0]['id']
- implied1 = self._create_implied_role(prior)
- implied2 = self._create_implied_role(implied1['id'])
- self._create_implied_role(implied2['id'])
-
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(1, len(token_roles))
- token_role_ids = [role['id'] for role in token_roles]
- self.assertIn(prior, token_role_ids)
-
- def test_delete_implied_role_do_not_show_in_v3_token(self):
- self.config_fixture.config(group='token', infer_roles=True)
- token_roles = self._get_scoped_token_roles()
- prior = token_roles[0]['id']
- implied = self._create_implied_role(prior)
-
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(2, len(token_roles))
- self._delete_implied_role(prior, implied['id'])
-
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(1, len(token_roles))
-
- def test_unrelated_implied_roles_do_not_change_v3_token(self):
- self.config_fixture.config(group='token', infer_roles=True)
- token_roles = self._get_scoped_token_roles()
- prior = token_roles[0]['id']
- implied = self._create_implied_role(prior)
-
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(2, len(token_roles))
-
- unrelated = self._create_role()
- url = '/roles/%s/implies/%s' % (unrelated['id'], implied['id'])
- self.put(url, expected_status=http_client.CREATED)
-
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(2, len(token_roles))
-
- self._delete_implied_role(unrelated['id'], implied['id'])
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(2, len(token_roles))
-
- def test_domain_scpecific_roles_do_not_show_v3_token(self):
- self.config_fixture.config(group='token', infer_roles=True)
- initial_token_roles = self._get_scoped_token_roles()
-
- new_role = self._create_role(domain_id=self.domain_id)
- self.assignment_api.create_grant(new_role['id'],
- user_id=self.user['id'],
- project_id=self.project['id'])
- implied = self._create_implied_role(new_role['id'])
-
- token_roles = self._get_scoped_token_roles()
- self.assertEqual(len(initial_token_roles) + 1, len(token_roles))
-
- # The implied role from the domain specific role should be in the
- # token, but not the domain specific role itself.
- token_role_ids = [role['id'] for role in token_roles]
- self.assertIn(implied['id'], token_role_ids)
- self.assertNotIn(new_role['id'], token_role_ids)
-
- def test_remove_all_roles_from_scope_result_in_404(self):
- # create a new user
- new_user = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
-
- # give the new user a role on a project
- path = '/projects/%s/users/%s/roles/%s' % (
- self.project['id'], new_user['id'], self.role['id'])
- self.put(path=path)
-
- # authenticate as the new user and get a project-scoped token
- auth_data = self.build_authentication_request(
- user_id=new_user['id'],
- password=new_user['password'],
- project_id=self.project['id'])
- subject_token_id = self.v3_create_token(auth_data).headers.get(
- 'X-Subject-Token')
-
- # make sure the project-scoped token is valid
- headers = {'X-Subject-Token': subject_token_id}
- r = self.get('/auth/tokens', headers=headers)
- self.assertValidProjectScopedTokenResponse(r)
-
- # remove the roles from the user for the given scope
- path = '/projects/%s/users/%s/roles/%s' % (
- self.project['id'], new_user['id'], self.role['id'])
- self.delete(path=path)
-
- # token validation should now result in 404
- self.get('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND)
-
-
-class TokenDataTests(object):
- """Test the data in specific token types."""
-
- def test_unscoped_token_format(self):
- # ensure the unscoped token response contains the appropriate data
- r = self.get('/auth/tokens', headers=self.headers)
- self.assertValidUnscopedTokenResponse(r)
-
- def test_domain_scoped_token_format(self):
- # ensure the domain scoped token response contains the appropriate data
- self.assignment_api.create_grant(
- self.role['id'],
- user_id=self.default_domain_user['id'],
- domain_id=self.domain['id'])
-
- domain_scoped_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- domain_id=self.domain['id'])
- )
- self.headers['X-Subject-Token'] = domain_scoped_token
- r = self.get('/auth/tokens', headers=self.headers)
- self.assertValidDomainScopedTokenResponse(r)
-
- def test_project_scoped_token_format(self):
- # ensure project scoped token responses contains the appropriate data
- project_scoped_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project['id'])
- )
- self.headers['X-Subject-Token'] = project_scoped_token
- r = self.get('/auth/tokens', headers=self.headers)
- self.assertValidProjectScopedTokenResponse(r)
-
- def test_extra_data_in_unscoped_token_fails_validation(self):
- # ensure unscoped token response contains the appropriate data
- r = self.get('/auth/tokens', headers=self.headers)
-
- # populate the response result with some extra data
- r.result['token'][u'extra'] = unicode(uuid.uuid4().hex)
- self.assertRaises(exception.SchemaValidationError,
- self.assertValidUnscopedTokenResponse,
- r)
-
- def test_extra_data_in_domain_scoped_token_fails_validation(self):
- # ensure domain scoped token response contains the appropriate data
- self.assignment_api.create_grant(
- self.role['id'],
- user_id=self.default_domain_user['id'],
- domain_id=self.domain['id'])
-
- domain_scoped_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- domain_id=self.domain['id'])
- )
- self.headers['X-Subject-Token'] = domain_scoped_token
- r = self.get('/auth/tokens', headers=self.headers)
-
- # populate the response result with some extra data
- r.result['token'][u'extra'] = unicode(uuid.uuid4().hex)
- self.assertRaises(exception.SchemaValidationError,
- self.assertValidDomainScopedTokenResponse,
- r)
-
- def test_extra_data_in_project_scoped_token_fails_validation(self):
- # ensure project scoped token responses contains the appropriate data
- project_scoped_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project['id'])
- )
- self.headers['X-Subject-Token'] = project_scoped_token
- resp = self.get('/auth/tokens', headers=self.headers)
-
- # populate the response result with some extra data
- resp.result['token'][u'extra'] = unicode(uuid.uuid4().hex)
- self.assertRaises(exception.SchemaValidationError,
- self.assertValidProjectScopedTokenResponse,
- resp)
-
-
-class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
- def config_overrides(self):
- super(AllowRescopeScopedTokenDisabledTests, self).config_overrides()
- self.config_fixture.config(
- group='token',
- allow_rescope_scoped_token=False)
-
- def test_rescoping_v3_to_v3_disabled(self):
- self.v3_create_token(
- self.build_authentication_request(
- token=self.get_scoped_token(),
- project_id=self.project_id),
- expected_status=http_client.FORBIDDEN)
-
- def _v2_token(self):
- body = {
- 'auth': {
- "tenantId": self.default_domain_project['id'],
- 'passwordCredentials': {
- 'userId': self.default_domain_user['id'],
- 'password': self.default_domain_user['password']
- }
- }}
- resp = self.admin_request(path='/v2.0/tokens',
- method='POST',
- body=body)
- v2_token_data = resp.result
- return v2_token_data
-
- def _v2_token_from_token(self, token):
- body = {
- 'auth': {
- "tenantId": self.project['id'],
- "token": token
- }}
- self.admin_request(path='/v2.0/tokens',
- method='POST',
- body=body,
- expected_status=http_client.FORBIDDEN)
-
- def test_rescoping_v2_to_v3_disabled(self):
- token = self._v2_token()
- self.v3_create_token(
- self.build_authentication_request(
- token=token['access']['token']['id'],
- project_id=self.project_id),
- expected_status=http_client.FORBIDDEN)
-
- def test_rescoping_v3_to_v2_disabled(self):
- token = {'id': self.get_scoped_token()}
- self._v2_token_from_token(token)
-
- def test_rescoping_v2_to_v2_disabled(self):
- token = self._v2_token()
- self._v2_token_from_token(token['access']['token'])
-
- def test_rescoped_domain_token_disabled(self):
-
- self.domainA = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domainA['id'])
- unscoped_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password']))
- # Get a domain-scoped token from the unscoped token
- domain_scoped_token = self.get_requested_token(
- self.build_authentication_request(
- token=unscoped_token,
- domain_id=self.domainA['id']))
- self.v3_create_token(
- self.build_authentication_request(
- token=domain_scoped_token,
- project_id=self.project_id),
- expected_status=http_client.FORBIDDEN)
-
-
-class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests, TokenDataTests):
- def config_overrides(self):
- super(TestPKITokenAPIs, self).config_overrides()
- self.config_fixture.config(group='token', provider='pki')
-
- def setUp(self):
- super(TestPKITokenAPIs, self).setUp()
- self.doSetUp()
-
- def verify_token(self, *args, **kwargs):
- return cms.verify_token(*args, **kwargs)
-
- def test_v3_token_id(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- resp = self.v3_create_token(auth_data)
- token_data = resp.result
- token_id = resp.headers.get('X-Subject-Token')
- self.assertIn('expires_at', token_data['token'])
-
- decoded_token = self.verify_token(token_id, CONF.signing.certfile,
- CONF.signing.ca_certs)
- decoded_token_dict = json.loads(decoded_token)
-
- token_resp_dict = json.loads(resp.body)
-
- self.assertEqual(decoded_token_dict, token_resp_dict)
- # should be able to validate hash PKI token as well
- hash_token_id = cms.cms_hash_token(token_id)
- headers = {'X-Subject-Token': hash_token_id}
- resp = self.get('/auth/tokens', headers=headers)
- expected_token_data = resp.result
- self.assertDictEqual(expected_token_data, token_data)
-
- def test_v3_v2_hashed_pki_token_intermix(self):
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project['id'])
- resp = self.v3_create_token(auth_data)
- token_data = resp.result
- token = resp.headers.get('X-Subject-Token')
-
- # should be able to validate a hash PKI token in v2 too
- token = cms.cms_hash_token(token)
- path = '/v2.0/tokens/%s' % (token)
- resp = self.admin_request(path=path,
- token=self.get_admin_token(),
- method='GET')
- v2_token = resp.result
- self.assertEqual(v2_token['access']['user']['id'],
- token_data['token']['user']['id'])
- # v2 token time has not fraction of second precision so
- # just need to make sure the non fraction part agrees
- self.assertIn(v2_token['access']['token']['expires'][:-1],
- token_data['token']['expires_at'])
- self.assertEqual(v2_token['access']['user']['roles'][0]['name'],
- token_data['token']['roles'][0]['name'])
-
-
-class TestPKIZTokenAPIs(TestPKITokenAPIs):
- def config_overrides(self):
- super(TestPKIZTokenAPIs, self).config_overrides()
- self.config_fixture.config(group='token', provider='pkiz')
-
- def verify_token(self, *args, **kwargs):
- return cms.pkiz_verify(*args, **kwargs)
-
-
-class TestUUIDTokenAPIs(test_v3.RestfulTestCase, TokenAPITests,
- TokenDataTests):
- def config_overrides(self):
- super(TestUUIDTokenAPIs, self).config_overrides()
- self.config_fixture.config(group='token', provider='uuid')
-
- def setUp(self):
- super(TestUUIDTokenAPIs, self).setUp()
- self.doSetUp()
-
- def test_v3_token_id(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- resp = self.v3_create_token(auth_data)
- token_data = resp.result
- token_id = resp.headers.get('X-Subject-Token')
- self.assertIn('expires_at', token_data['token'])
- self.assertFalse(cms.is_asn1_token(token_id))
-
-
-class TestFernetTokenAPIs(test_v3.RestfulTestCase, TokenAPITests,
- TokenDataTests):
- def config_overrides(self):
- super(TestFernetTokenAPIs, self).config_overrides()
- self.config_fixture.config(group='token', provider='fernet')
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
- def setUp(self):
- super(TestFernetTokenAPIs, self).setUp()
- self.doSetUp()
-
- def _make_auth_request(self, auth_data):
- token = super(TestFernetTokenAPIs, self)._make_auth_request(auth_data)
- self.assertLess(len(token), 255)
- return token
-
- def test_validate_tampered_unscoped_token_fails(self):
- unscoped_token = self._get_unscoped_token()
- tampered_token = (unscoped_token[:50] + uuid.uuid4().hex +
- unscoped_token[50 + 32:])
- self._validate_token(tampered_token,
- expected_status=http_client.NOT_FOUND)
-
- def test_validate_tampered_project_scoped_token_fails(self):
- project_scoped_token = self._get_project_scoped_token()
- tampered_token = (project_scoped_token[:50] + uuid.uuid4().hex +
- project_scoped_token[50 + 32:])
- self._validate_token(tampered_token,
- expected_status=http_client.NOT_FOUND)
-
- def test_validate_tampered_trust_scoped_token_fails(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Get a trust scoped token
- tampered_token = (trust_scoped_token[:50] + uuid.uuid4().hex +
- trust_scoped_token[50 + 32:])
- self._validate_token(tampered_token,
- expected_status=http_client.NOT_FOUND)
-
-
-class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase):
- """Test token revoke using v3 Identity API by token owner and admin."""
-
- def load_sample_data(self):
- """Load Sample Data for Test Cases.
-
- Two domains, domainA and domainB
- Two users in domainA, userNormalA and userAdminA
- One user in domainB, userAdminB
-
- """
- super(TestTokenRevokeSelfAndAdmin, self).load_sample_data()
- # DomainA setup
- self.domainA = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainA['id'], self.domainA)
-
- self.userAdminA = unit.create_user(self.identity_api,
- domain_id=self.domainA['id'])
-
- self.userNormalA = unit.create_user(self.identity_api,
- domain_id=self.domainA['id'])
-
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.userAdminA['id'],
- domain_id=self.domainA['id'])
-
- def _policy_fixture(self):
- return ksfixtures.Policy(unit.dirs.etc('policy.v3cloudsample.json'),
- self.config_fixture)
-
- def test_user_revokes_own_token(self):
- user_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.userNormalA['id'],
- password=self.userNormalA['password'],
- user_domain_id=self.domainA['id']))
- self.assertNotEmpty(user_token)
- headers = {'X-Subject-Token': user_token}
-
- adminA_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.userAdminA['id'],
- password=self.userAdminA['password'],
- domain_name=self.domainA['name']))
-
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.OK,
- token=adminA_token)
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.OK,
- token=user_token)
- self.delete('/auth/tokens', headers=headers,
- token=user_token)
- # invalid X-Auth-Token and invalid X-Subject-Token
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.UNAUTHORIZED,
- token=user_token)
- # invalid X-Auth-Token and invalid X-Subject-Token
- self.delete('/auth/tokens', headers=headers,
- expected_status=http_client.UNAUTHORIZED,
- token=user_token)
- # valid X-Auth-Token and invalid X-Subject-Token
- self.delete('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND,
- token=adminA_token)
- # valid X-Auth-Token and invalid X-Subject-Token
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND,
- token=adminA_token)
-
- def test_adminA_revokes_userA_token(self):
- user_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.userNormalA['id'],
- password=self.userNormalA['password'],
- user_domain_id=self.domainA['id']))
- self.assertNotEmpty(user_token)
- headers = {'X-Subject-Token': user_token}
-
- adminA_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.userAdminA['id'],
- password=self.userAdminA['password'],
- domain_name=self.domainA['name']))
-
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.OK,
- token=adminA_token)
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.OK,
- token=user_token)
- self.delete('/auth/tokens', headers=headers,
- token=adminA_token)
- # invalid X-Auth-Token and invalid X-Subject-Token
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.UNAUTHORIZED,
- token=user_token)
- # valid X-Auth-Token and invalid X-Subject-Token
- self.delete('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND,
- token=adminA_token)
- # valid X-Auth-Token and invalid X-Subject-Token
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND,
- token=adminA_token)
-
- def test_adminB_fails_revoking_userA_token(self):
- # DomainB setup
- self.domainB = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainB['id'], self.domainB)
- userAdminB = unit.create_user(self.identity_api,
- domain_id=self.domainB['id'])
- self.assignment_api.create_grant(self.role['id'],
- user_id=userAdminB['id'],
- domain_id=self.domainB['id'])
-
- user_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.userNormalA['id'],
- password=self.userNormalA['password'],
- user_domain_id=self.domainA['id']))
- headers = {'X-Subject-Token': user_token}
-
- adminB_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=userAdminB['id'],
- password=userAdminB['password'],
- domain_name=self.domainB['name']))
-
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.FORBIDDEN,
- token=adminB_token)
- self.delete('/auth/tokens', headers=headers,
- expected_status=http_client.FORBIDDEN,
- token=adminB_token)
-
-
-class TestTokenRevokeById(test_v3.RestfulTestCase):
- """Test token revocation on the v3 Identity API."""
-
- def config_overrides(self):
- super(TestTokenRevokeById, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='pki',
- revoke_by_id=False)
-
- def setUp(self):
- """Setup for Token Revoking Test Cases.
-
- As well as the usual housekeeping, create a set of domains,
- users, groups, roles and projects for the subsequent tests:
-
- - Two domains: A & B
- - Three users (1, 2 and 3)
- - Three groups (1, 2 and 3)
- - Two roles (1 and 2)
- - DomainA owns user1, domainB owns user2 and user3
- - DomainA owns group1 and group2, domainB owns group3
- - User1 and user2 are members of group1
- - User3 is a member of group2
- - Two projects: A & B, both in domainA
- - Group1 has role1 on Project A and B, meaning that user1 and user2
- will get these roles by virtue of membership
- - User1, 2 and 3 have role1 assigned to projectA
- - Group1 has role1 on Project A and B, meaning that user1 and user2
- will get role1 (duplicated) by virtue of membership
- - User1 has role2 assigned to domainA
-
- """
- super(TestTokenRevokeById, self).setUp()
-
- # Start by creating a couple of domains and projects
- self.domainA = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.domainB = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainB['id'], self.domainB)
- self.projectA = unit.new_project_ref(domain_id=self.domainA['id'])
- self.resource_api.create_project(self.projectA['id'], self.projectA)
- self.projectB = unit.new_project_ref(domain_id=self.domainA['id'])
- self.resource_api.create_project(self.projectB['id'], self.projectB)
-
- # Now create some users
- self.user1 = unit.create_user(self.identity_api,
- domain_id=self.domainA['id'])
-
- self.user2 = unit.create_user(self.identity_api,
- domain_id=self.domainB['id'])
-
- self.user3 = unit.create_user(self.identity_api,
- domain_id=self.domainB['id'])
-
- self.group1 = unit.new_group_ref(domain_id=self.domainA['id'])
- self.group1 = self.identity_api.create_group(self.group1)
-
- self.group2 = unit.new_group_ref(domain_id=self.domainA['id'])
- self.group2 = self.identity_api.create_group(self.group2)
-
- self.group3 = unit.new_group_ref(domain_id=self.domainB['id'])
- self.group3 = self.identity_api.create_group(self.group3)
-
- self.identity_api.add_user_to_group(self.user1['id'],
- self.group1['id'])
- self.identity_api.add_user_to_group(self.user2['id'],
- self.group1['id'])
- self.identity_api.add_user_to_group(self.user3['id'],
- self.group2['id'])
-
- self.role1 = unit.new_role_ref()
- self.role_api.create_role(self.role1['id'], self.role1)
- self.role2 = unit.new_role_ref()
- self.role_api.create_role(self.role2['id'], self.role2)
-
- self.assignment_api.create_grant(self.role2['id'],
- user_id=self.user1['id'],
- domain_id=self.domainA['id'])
- self.assignment_api.create_grant(self.role1['id'],
- user_id=self.user1['id'],
- project_id=self.projectA['id'])
- self.assignment_api.create_grant(self.role1['id'],
- user_id=self.user2['id'],
- project_id=self.projectA['id'])
- self.assignment_api.create_grant(self.role1['id'],
- user_id=self.user3['id'],
- project_id=self.projectA['id'])
- self.assignment_api.create_grant(self.role1['id'],
- group_id=self.group1['id'],
- project_id=self.projectA['id'])
-
- def test_unscoped_token_remains_valid_after_role_assignment(self):
- unscoped_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password']))
-
- scoped_token = self.get_requested_token(
- self.build_authentication_request(
- token=unscoped_token,
- project_id=self.projectA['id']))
-
- # confirm both tokens are valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': unscoped_token},
- expected_status=http_client.OK)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': scoped_token},
- expected_status=http_client.OK)
-
- # create a new role
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
-
- # assign a new role
- self.put(
- '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
- 'project_id': self.projectA['id'],
- 'user_id': self.user1['id'],
- 'role_id': role['id']})
-
- # both tokens should remain valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': unscoped_token},
- expected_status=http_client.OK)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': scoped_token},
- expected_status=http_client.OK)
-
- def test_deleting_user_grant_revokes_token(self):
- """Test deleting a user grant revokes token.
-
- Test Plan:
-
- - Get a token for user1, scoped to ProjectA
- - Delete the grant user1 has on ProjectA
- - Check token is no longer valid
-
- """
- auth_data = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectA['id'])
- token = self.get_requested_token(auth_data)
- # Confirm token is valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.OK)
- # Delete the grant, which should invalidate the token
- grant_url = (
- '/projects/%(project_id)s/users/%(user_id)s/'
- 'roles/%(role_id)s' % {
- 'project_id': self.projectA['id'],
- 'user_id': self.user1['id'],
- 'role_id': self.role1['id']})
- self.delete(grant_url)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.NOT_FOUND)
-
- def role_data_fixtures(self):
- self.projectC = unit.new_project_ref(domain_id=self.domainA['id'])
- self.resource_api.create_project(self.projectC['id'], self.projectC)
- self.user4 = unit.create_user(self.identity_api,
- domain_id=self.domainB['id'])
- self.user5 = unit.create_user(self.identity_api,
- domain_id=self.domainA['id'])
- self.user6 = unit.create_user(self.identity_api,
- domain_id=self.domainA['id'])
- self.identity_api.add_user_to_group(self.user5['id'],
- self.group1['id'])
- self.assignment_api.create_grant(self.role1['id'],
- group_id=self.group1['id'],
- project_id=self.projectB['id'])
- self.assignment_api.create_grant(self.role2['id'],
- user_id=self.user4['id'],
- project_id=self.projectC['id'])
- self.assignment_api.create_grant(self.role1['id'],
- user_id=self.user6['id'],
- project_id=self.projectA['id'])
- self.assignment_api.create_grant(self.role1['id'],
- user_id=self.user6['id'],
- domain_id=self.domainA['id'])
-
- def test_deleting_role_revokes_token(self):
- """Test deleting a role revokes token.
-
- Add some additional test data, namely:
-
- - A third project (project C)
- - Three additional users - user4 owned by domainB and user5 and 6 owned
- by domainA (different domain ownership should not affect the test
- results, just provided to broaden test coverage)
- - User5 is a member of group1
- - Group1 gets an additional assignment - role1 on projectB as well as
- its existing role1 on projectA
- - User4 has role2 on Project C
- - User6 has role1 on projectA and domainA
- - This allows us to create 5 tokens by virtue of different types of
- role assignment:
- - user1, scoped to ProjectA by virtue of user role1 assignment
- - user5, scoped to ProjectB by virtue of group role1 assignment
- - user4, scoped to ProjectC by virtue of user role2 assignment
- - user6, scoped to ProjectA by virtue of user role1 assignment
- - user6, scoped to DomainA by virtue of user role1 assignment
- - role1 is then deleted
- - Check the tokens on Project A and B, and DomainA are revoked, but not
- the one for Project C
-
- """
- self.role_data_fixtures()
-
- # Now we are ready to start issuing requests
- auth_data = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectA['id'])
- tokenA = self.get_requested_token(auth_data)
- auth_data = self.build_authentication_request(
- user_id=self.user5['id'],
- password=self.user5['password'],
- project_id=self.projectB['id'])
- tokenB = self.get_requested_token(auth_data)
- auth_data = self.build_authentication_request(
- user_id=self.user4['id'],
- password=self.user4['password'],
- project_id=self.projectC['id'])
- tokenC = self.get_requested_token(auth_data)
- auth_data = self.build_authentication_request(
- user_id=self.user6['id'],
- password=self.user6['password'],
- project_id=self.projectA['id'])
- tokenD = self.get_requested_token(auth_data)
- auth_data = self.build_authentication_request(
- user_id=self.user6['id'],
- password=self.user6['password'],
- domain_id=self.domainA['id'])
- tokenE = self.get_requested_token(auth_data)
- # Confirm tokens are valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenA},
- expected_status=http_client.OK)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenB},
- expected_status=http_client.OK)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenC},
- expected_status=http_client.OK)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenD},
- expected_status=http_client.OK)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenE},
- expected_status=http_client.OK)
-
- # Delete the role, which should invalidate the tokens
- role_url = '/roles/%s' % self.role1['id']
- self.delete(role_url)
-
- # Check the tokens that used role1 is invalid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenA},
- expected_status=http_client.NOT_FOUND)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenB},
- expected_status=http_client.NOT_FOUND)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenD},
- expected_status=http_client.NOT_FOUND)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenE},
- expected_status=http_client.NOT_FOUND)
-
- # ...but the one using role2 is still valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': tokenC},
- expected_status=http_client.OK)
-
- def test_domain_user_role_assignment_maintains_token(self):
- """Test user-domain role assignment maintains existing token.
-
- Test Plan:
-
- - Get a token for user1, scoped to ProjectA
- - Create a grant for user1 on DomainB
- - Check token is still valid
-
- """
- auth_data = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectA['id'])
- token = self.get_requested_token(auth_data)
- # Confirm token is valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.OK)
- # Assign a role, which should not affect the token
- grant_url = (
- '/domains/%(domain_id)s/users/%(user_id)s/'
- 'roles/%(role_id)s' % {
- 'domain_id': self.domainB['id'],
- 'user_id': self.user1['id'],
- 'role_id': self.role1['id']})
- self.put(grant_url)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.OK)
-
- def test_disabling_project_revokes_token(self):
- token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.user3['id'],
- password=self.user3['password'],
- project_id=self.projectA['id']))
-
- # confirm token is valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.OK)
-
- # disable the project, which should invalidate the token
- self.patch(
- '/projects/%(project_id)s' % {'project_id': self.projectA['id']},
- body={'project': {'enabled': False}})
-
- # user should no longer have access to the project
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.NOT_FOUND)
- self.v3_create_token(
- self.build_authentication_request(
- user_id=self.user3['id'],
- password=self.user3['password'],
- project_id=self.projectA['id']),
- expected_status=http_client.UNAUTHORIZED)
-
- def test_deleting_project_revokes_token(self):
- token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.user3['id'],
- password=self.user3['password'],
- project_id=self.projectA['id']))
-
- # confirm token is valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.OK)
-
- # delete the project, which should invalidate the token
- self.delete(
- '/projects/%(project_id)s' % {'project_id': self.projectA['id']})
-
- # user should no longer have access to the project
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.NOT_FOUND)
- self.v3_create_token(
- self.build_authentication_request(
- user_id=self.user3['id'],
- password=self.user3['password'],
- project_id=self.projectA['id']),
- expected_status=http_client.UNAUTHORIZED)
-
- def test_deleting_group_grant_revokes_tokens(self):
- """Test deleting a group grant revokes tokens.
-
- Test Plan:
-
- - Get a token for user1, scoped to ProjectA
- - Get a token for user2, scoped to ProjectA
- - Get a token for user3, scoped to ProjectA
- - Delete the grant group1 has on ProjectA
- - Check tokens for user1 & user2 are no longer valid,
- since user1 and user2 are members of group1
- - Check token for user3 is invalid too
-
- """
- auth_data = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectA['id'])
- token1 = self.get_requested_token(auth_data)
- auth_data = self.build_authentication_request(
- user_id=self.user2['id'],
- password=self.user2['password'],
- project_id=self.projectA['id'])
- token2 = self.get_requested_token(auth_data)
- auth_data = self.build_authentication_request(
- user_id=self.user3['id'],
- password=self.user3['password'],
- project_id=self.projectA['id'])
- token3 = self.get_requested_token(auth_data)
- # Confirm tokens are valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token1},
- expected_status=http_client.OK)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token2},
- expected_status=http_client.OK)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token3},
- expected_status=http_client.OK)
- # Delete the group grant, which should invalidate the
- # tokens for user1 and user2
- grant_url = (
- '/projects/%(project_id)s/groups/%(group_id)s/'
- 'roles/%(role_id)s' % {
- 'project_id': self.projectA['id'],
- 'group_id': self.group1['id'],
- 'role_id': self.role1['id']})
- self.delete(grant_url)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token1},
- expected_status=http_client.NOT_FOUND)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token2},
- expected_status=http_client.NOT_FOUND)
- # But user3's token should be invalid too as revocation is done for
- # scope role & project
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token3},
- expected_status=http_client.NOT_FOUND)
-
- def test_domain_group_role_assignment_maintains_token(self):
- """Test domain-group role assignment maintains existing token.
-
- Test Plan:
-
- - Get a token for user1, scoped to ProjectA
- - Create a grant for group1 on DomainB
- - Check token is still longer valid
-
- """
- auth_data = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectA['id'])
- token = self.get_requested_token(auth_data)
- # Confirm token is valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.OK)
- # Delete the grant, which should invalidate the token
- grant_url = (
- '/domains/%(domain_id)s/groups/%(group_id)s/'
- 'roles/%(role_id)s' % {
- 'domain_id': self.domainB['id'],
- 'group_id': self.group1['id'],
- 'role_id': self.role1['id']})
- self.put(grant_url)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.OK)
-
- def test_group_membership_changes_revokes_token(self):
- """Test add/removal to/from group revokes token.
-
- Test Plan:
-
- - Get a token for user1, scoped to ProjectA
- - Get a token for user2, scoped to ProjectA
- - Remove user1 from group1
- - Check token for user1 is no longer valid
- - Check token for user2 is still valid, even though
- user2 is also part of group1
- - Add user2 to group2
- - Check token for user2 is now no longer valid
-
- """
- auth_data = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectA['id'])
- token1 = self.get_requested_token(auth_data)
- auth_data = self.build_authentication_request(
- user_id=self.user2['id'],
- password=self.user2['password'],
- project_id=self.projectA['id'])
- token2 = self.get_requested_token(auth_data)
- # Confirm tokens are valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token1},
- expected_status=http_client.OK)
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token2},
- expected_status=http_client.OK)
- # Remove user1 from group1, which should invalidate
- # the token
- self.delete('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group1['id'],
- 'user_id': self.user1['id']})
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token1},
- expected_status=http_client.NOT_FOUND)
- # But user2's token should still be valid
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token2},
- expected_status=http_client.OK)
- # Adding user2 to a group should not invalidate token
- self.put('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group2['id'],
- 'user_id': self.user2['id']})
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token2},
- expected_status=http_client.OK)
-
- def test_removing_role_assignment_does_not_affect_other_users(self):
- """Revoking a role from one user should not affect other users."""
- # This group grant is not needed for the test
- self.delete(
- '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' %
- {'project_id': self.projectA['id'],
- 'group_id': self.group1['id'],
- 'role_id': self.role1['id']})
-
- user1_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectA['id']))
-
- user3_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.user3['id'],
- password=self.user3['password'],
- project_id=self.projectA['id']))
-
- # delete relationships between user1 and projectA from setUp
- self.delete(
- '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
- 'project_id': self.projectA['id'],
- 'user_id': self.user1['id'],
- 'role_id': self.role1['id']})
- # authorization for the first user should now fail
- self.head('/auth/tokens',
- headers={'X-Subject-Token': user1_token},
- expected_status=http_client.NOT_FOUND)
- self.v3_create_token(
- self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectA['id']),
- expected_status=http_client.UNAUTHORIZED)
-
- # authorization for the second user should still succeed
- self.head('/auth/tokens',
- headers={'X-Subject-Token': user3_token},
- expected_status=http_client.OK)
- self.v3_create_token(
- self.build_authentication_request(
- user_id=self.user3['id'],
- password=self.user3['password'],
- project_id=self.projectA['id']))
-
- def test_deleting_project_deletes_grants(self):
- # This is to make it a little bit more pretty with PEP8
- role_path = ('/projects/%(project_id)s/users/%(user_id)s/'
- 'roles/%(role_id)s')
- role_path = role_path % {'user_id': self.user['id'],
- 'project_id': self.projectA['id'],
- 'role_id': self.role['id']}
-
- # grant the user a role on the project
- self.put(role_path)
-
- # delete the project, which should remove the roles
- self.delete(
- '/projects/%(project_id)s' % {'project_id': self.projectA['id']})
-
- # Make sure that we get a 404 Not Found when heading that role.
- self.head(role_path, expected_status=http_client.NOT_FOUND)
-
- def get_v2_token(self, token=None, project_id=None):
- body = {'auth': {}, }
-
- if token:
- body['auth']['token'] = {
- 'id': token
- }
- else:
- body['auth']['passwordCredentials'] = {
- 'username': self.default_domain_user['name'],
- 'password': self.default_domain_user['password'],
- }
-
- if project_id:
- body['auth']['tenantId'] = project_id
-
- r = self.admin_request(method='POST', path='/v2.0/tokens', body=body)
- return r.json_body['access']['token']['id']
-
- def test_revoke_v2_token_no_check(self):
- # Test that a V2 token can be revoked without validating it first.
-
- token = self.get_v2_token()
-
- self.delete('/auth/tokens',
- headers={'X-Subject-Token': token})
-
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.NOT_FOUND)
-
- def test_revoke_token_from_token(self):
- # Test that a scoped token can be requested from an unscoped token,
- # the scoped token can be revoked, and the unscoped token remains
- # valid.
-
- unscoped_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password']))
-
- # Get a project-scoped token from the unscoped token
- project_scoped_token = self.get_requested_token(
- self.build_authentication_request(
- token=unscoped_token,
- project_id=self.projectA['id']))
-
- # Get a domain-scoped token from the unscoped token
- domain_scoped_token = self.get_requested_token(
- self.build_authentication_request(
- token=unscoped_token,
- domain_id=self.domainA['id']))
-
- # revoke the project-scoped token.
- self.delete('/auth/tokens',
- headers={'X-Subject-Token': project_scoped_token})
-
- # The project-scoped token is invalidated.
- self.head('/auth/tokens',
- headers={'X-Subject-Token': project_scoped_token},
- expected_status=http_client.NOT_FOUND)
-
- # The unscoped token should still be valid.
- self.head('/auth/tokens',
- headers={'X-Subject-Token': unscoped_token},
- expected_status=http_client.OK)
-
- # The domain-scoped token should still be valid.
- self.head('/auth/tokens',
- headers={'X-Subject-Token': domain_scoped_token},
- expected_status=http_client.OK)
-
- # revoke the domain-scoped token.
- self.delete('/auth/tokens',
- headers={'X-Subject-Token': domain_scoped_token})
-
- # The domain-scoped token is invalid.
- self.head('/auth/tokens',
- headers={'X-Subject-Token': domain_scoped_token},
- expected_status=http_client.NOT_FOUND)
-
- # The unscoped token should still be valid.
- self.head('/auth/tokens',
- headers={'X-Subject-Token': unscoped_token},
- expected_status=http_client.OK)
-
- def test_revoke_token_from_token_v2(self):
- # Test that a scoped token can be requested from an unscoped token,
- # the scoped token can be revoked, and the unscoped token remains
- # valid.
-
- unscoped_token = self.get_v2_token()
-
- # Get a project-scoped token from the unscoped token
- project_scoped_token = self.get_v2_token(
- token=unscoped_token, project_id=self.default_domain_project['id'])
-
- # revoke the project-scoped token.
- self.delete('/auth/tokens',
- headers={'X-Subject-Token': project_scoped_token})
-
- # The project-scoped token is invalidated.
- self.head('/auth/tokens',
- headers={'X-Subject-Token': project_scoped_token},
- expected_status=http_client.NOT_FOUND)
-
- # The unscoped token should still be valid.
- self.head('/auth/tokens',
- headers={'X-Subject-Token': unscoped_token},
- expected_status=http_client.OK)
-
-
-class TestTokenRevokeByAssignment(TestTokenRevokeById):
-
- def config_overrides(self):
- super(TestTokenRevokeById, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='uuid',
- revoke_by_id=True)
-
- def test_removing_role_assignment_keeps_other_project_token_groups(self):
- """Test assignment isolation.
-
- Revoking a group role from one project should not invalidate all group
- users' tokens
- """
- self.assignment_api.create_grant(self.role1['id'],
- group_id=self.group1['id'],
- project_id=self.projectB['id'])
-
- project_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectB['id']))
-
- other_project_token = self.get_requested_token(
- self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- project_id=self.projectA['id']))
-
- self.assignment_api.delete_grant(self.role1['id'],
- group_id=self.group1['id'],
- project_id=self.projectB['id'])
-
- # authorization for the projectA should still succeed
- self.head('/auth/tokens',
- headers={'X-Subject-Token': other_project_token},
- expected_status=http_client.OK)
- # while token for the projectB should not
- self.head('/auth/tokens',
- headers={'X-Subject-Token': project_token},
- expected_status=http_client.NOT_FOUND)
- revoked_tokens = [
- t['id'] for t in self.token_provider_api.list_revoked_tokens()]
- # token is in token revocation list
- self.assertIn(project_token, revoked_tokens)
-
-
-class RevokeContribTests(test_v3.RestfulTestCase):
-
- @mock.patch.object(versionutils, 'report_deprecated_feature')
- def test_exception_happens(self, mock_deprecator):
- routers.RevokeExtension(mock.ANY)
- mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("Remove revoke_extension from", args[1])
-
-
-class TestTokenRevokeApi(TestTokenRevokeById):
- """Test token revocation on the v3 Identity API."""
-
- def config_overrides(self):
- super(TestTokenRevokeApi, self).config_overrides()
- self.config_fixture.config(
- group='token',
- provider='pki',
- revoke_by_id=False)
-
- def assertValidDeletedProjectResponse(self, events_response, project_id):
- events = events_response['events']
- self.assertEqual(1, len(events))
- self.assertEqual(project_id, events[0]['project_id'])
- self.assertIsNotNone(events[0]['issued_before'])
- self.assertIsNotNone(events_response['links'])
- del (events_response['events'][0]['issued_before'])
- del (events_response['links'])
- expected_response = {'events': [{'project_id': project_id}]}
- self.assertEqual(expected_response, events_response)
-
- def assertDomainAndProjectInList(self, events_response, domain_id):
- events = events_response['events']
- self.assertEqual(2, len(events))
- self.assertEqual(domain_id, events[0]['project_id'])
- self.assertEqual(domain_id, events[1]['domain_id'])
- self.assertIsNotNone(events[0]['issued_before'])
- self.assertIsNotNone(events[1]['issued_before'])
- self.assertIsNotNone(events_response['links'])
- del (events_response['events'][0]['issued_before'])
- del (events_response['events'][1]['issued_before'])
- del (events_response['links'])
- expected_response = {'events': [{'project_id': domain_id},
- {'domain_id': domain_id}]}
- self.assertEqual(expected_response, events_response)
-
- def assertValidRevokedTokenResponse(self, events_response, **kwargs):
- events = events_response['events']
- self.assertEqual(1, len(events))
- for k, v in kwargs.items():
- self.assertEqual(v, events[0].get(k))
- self.assertIsNotNone(events[0]['issued_before'])
- self.assertIsNotNone(events_response['links'])
- del (events_response['events'][0]['issued_before'])
- del (events_response['links'])
-
- expected_response = {'events': [kwargs]}
- self.assertEqual(expected_response, events_response)
-
- def test_revoke_token(self):
- scoped_token = self.get_scoped_token()
- headers = {'X-Subject-Token': scoped_token}
- response = self.get('/auth/tokens', headers=headers).json_body['token']
-
- self.delete('/auth/tokens', headers=headers)
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND)
- events_response = self.get('/OS-REVOKE/events').json_body
- self.assertValidRevokedTokenResponse(events_response,
- audit_id=response['audit_ids'][0])
-
- def test_revoke_v2_token(self):
- token = self.get_v2_token()
- headers = {'X-Subject-Token': token}
- response = self.get('/auth/tokens',
- headers=headers).json_body['token']
- self.delete('/auth/tokens', headers=headers)
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND)
- events_response = self.get('/OS-REVOKE/events').json_body
-
- self.assertValidRevokedTokenResponse(
- events_response,
- audit_id=response['audit_ids'][0])
-
- def test_revoke_by_id_false_returns_gone(self):
- self.get('/auth/tokens/OS-PKI/revoked',
- expected_status=http_client.GONE)
-
- def test_list_delete_project_shows_in_event_list(self):
- self.role_data_fixtures()
- events = self.get('/OS-REVOKE/events').json_body['events']
- self.assertEqual([], events)
- self.delete(
- '/projects/%(project_id)s' % {'project_id': self.projectA['id']})
- events_response = self.get('/OS-REVOKE/events').json_body
-
- self.assertValidDeletedProjectResponse(events_response,
- self.projectA['id'])
-
- def test_disable_domain_shows_in_event_list(self):
- events = self.get('/OS-REVOKE/events').json_body['events']
- self.assertEqual([], events)
- disable_body = {'domain': {'enabled': False}}
- self.patch(
- '/domains/%(project_id)s' % {'project_id': self.domainA['id']},
- body=disable_body)
-
- events = self.get('/OS-REVOKE/events').json_body
-
- self.assertDomainAndProjectInList(events, self.domainA['id'])
-
- def assertEventDataInList(self, events, **kwargs):
- found = False
- for e in events:
- for key, value in kwargs.items():
- try:
- if e[key] != value:
- break
- except KeyError:
- # Break the loop and present a nice error instead of
- # KeyError
- break
- else:
- # If the value of the event[key] matches the value of the kwarg
- # for each item in kwargs, the event was fully matched and
- # the assertTrue below should succeed.
- found = True
- self.assertTrue(found,
- 'event with correct values not in list, expected to '
- 'find event with key-value pairs. Expected: '
- '"%(expected)s" Events: "%(events)s"' %
- {'expected': ','.join(
- ["'%s=%s'" % (k, v) for k, v in kwargs.items()]),
- 'events': events})
-
- def test_list_delete_token_shows_in_event_list(self):
- self.role_data_fixtures()
- events = self.get('/OS-REVOKE/events').json_body['events']
- self.assertEqual([], events)
-
- scoped_token = self.get_scoped_token()
- headers = {'X-Subject-Token': scoped_token}
- auth_req = self.build_authentication_request(token=scoped_token)
- response = self.v3_create_token(auth_req)
- token2 = response.json_body['token']
- headers2 = {'X-Subject-Token': response.headers['X-Subject-Token']}
-
- response = self.v3_create_token(auth_req)
- response.json_body['token']
- headers3 = {'X-Subject-Token': response.headers['X-Subject-Token']}
-
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.OK)
- self.head('/auth/tokens', headers=headers2,
- expected_status=http_client.OK)
- self.head('/auth/tokens', headers=headers3,
- expected_status=http_client.OK)
-
- self.delete('/auth/tokens', headers=headers)
- # NOTE(ayoung): not deleting token3, as it should be deleted
- # by previous
- events_response = self.get('/OS-REVOKE/events').json_body
- events = events_response['events']
- self.assertEqual(1, len(events))
- self.assertEventDataInList(
- events,
- audit_id=token2['audit_ids'][1])
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND)
- self.head('/auth/tokens', headers=headers2,
- expected_status=http_client.OK)
- self.head('/auth/tokens', headers=headers3,
- expected_status=http_client.OK)
-
- def test_list_with_filter(self):
-
- self.role_data_fixtures()
- events = self.get('/OS-REVOKE/events').json_body['events']
- self.assertEqual(0, len(events))
-
- scoped_token = self.get_scoped_token()
- headers = {'X-Subject-Token': scoped_token}
- auth = self.build_authentication_request(token=scoped_token)
- headers2 = {'X-Subject-Token': self.get_requested_token(auth)}
- self.delete('/auth/tokens', headers=headers)
- self.delete('/auth/tokens', headers=headers2)
-
- events = self.get('/OS-REVOKE/events').json_body['events']
-
- self.assertEqual(2, len(events))
- future = utils.isotime(timeutils.utcnow() +
- datetime.timedelta(seconds=1000))
-
- events = self.get('/OS-REVOKE/events?since=%s' % (future)
- ).json_body['events']
- self.assertEqual(0, len(events))
-
-
-class TestAuthExternalDisabled(test_v3.RestfulTestCase):
- def config_overrides(self):
- super(TestAuthExternalDisabled, self).config_overrides()
- self.config_fixture.config(
- group='auth',
- methods=['password', 'token'])
-
- def test_remote_user_disabled(self):
- api = auth.controllers.Auth()
- remote_user = '%s@%s' % (self.user['name'], self.domain['name'])
- context, auth_info, auth_context = self.build_external_auth_request(
- remote_user)
- self.assertRaises(exception.Unauthorized,
- api.authenticate,
- context,
- auth_info,
- auth_context)
-
-
-class TestAuthExternalDomain(test_v3.RestfulTestCase):
- content_type = 'json'
-
- def config_overrides(self):
- super(TestAuthExternalDomain, self).config_overrides()
- self.kerberos = False
- self.auth_plugin_config_override(external='Domain')
-
- def test_remote_user_with_realm(self):
- api = auth.controllers.Auth()
- remote_user = self.user['name']
- remote_domain = self.domain['name']
- context, auth_info, auth_context = self.build_external_auth_request(
- remote_user, remote_domain=remote_domain, kerberos=self.kerberos)
-
- api.authenticate(context, auth_info, auth_context)
- self.assertEqual(self.user['id'], auth_context['user_id'])
-
- # Now test to make sure the user name can, itself, contain the
- # '@' character.
- user = {'name': 'myname@mydivision'}
- self.identity_api.update_user(self.user['id'], user)
- remote_user = user['name']
- context, auth_info, auth_context = self.build_external_auth_request(
- remote_user, remote_domain=remote_domain, kerberos=self.kerberos)
-
- api.authenticate(context, auth_info, auth_context)
- self.assertEqual(self.user['id'], auth_context['user_id'])
-
- def test_project_id_scoped_with_remote_user(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
- auth_data = self.build_authentication_request(
- project_id=self.project['id'],
- kerberos=self.kerberos)
- remote_user = self.user['name']
- remote_domain = self.domain['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'REMOTE_DOMAIN': remote_domain,
- 'AUTH_TYPE': 'Negotiate'})
- r = self.v3_create_token(auth_data)
- token = self.assertValidProjectScopedTokenResponse(r)
- self.assertEqual(self.user['name'], token['bind']['kerberos'])
-
- def test_unscoped_bind_with_remote_user(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
- auth_data = self.build_authentication_request(kerberos=self.kerberos)
- remote_user = self.user['name']
- remote_domain = self.domain['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'REMOTE_DOMAIN': remote_domain,
- 'AUTH_TYPE': 'Negotiate'})
- r = self.v3_create_token(auth_data)
- token = self.assertValidUnscopedTokenResponse(r)
- self.assertEqual(self.user['name'], token['bind']['kerberos'])
-
-
-class TestAuthExternalDefaultDomain(test_v3.RestfulTestCase):
- content_type = 'json'
-
- def config_overrides(self):
- super(TestAuthExternalDefaultDomain, self).config_overrides()
- self.kerberos = False
- self.auth_plugin_config_override(
- external='keystone.auth.plugins.external.DefaultDomain')
-
- def test_remote_user_with_default_domain(self):
- api = auth.controllers.Auth()
- remote_user = self.default_domain_user['name']
- context, auth_info, auth_context = self.build_external_auth_request(
- remote_user, kerberos=self.kerberos)
-
- api.authenticate(context, auth_info, auth_context)
- self.assertEqual(self.default_domain_user['id'],
- auth_context['user_id'])
-
- # Now test to make sure the user name can, itself, contain the
- # '@' character.
- user = {'name': 'myname@mydivision'}
- self.identity_api.update_user(self.default_domain_user['id'], user)
- remote_user = user['name']
- context, auth_info, auth_context = self.build_external_auth_request(
- remote_user, kerberos=self.kerberos)
-
- api.authenticate(context, auth_info, auth_context)
- self.assertEqual(self.default_domain_user['id'],
- auth_context['user_id'])
-
- def test_project_id_scoped_with_remote_user(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
- auth_data = self.build_authentication_request(
- project_id=self.default_domain_project['id'],
- kerberos=self.kerberos)
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- r = self.v3_create_token(auth_data)
- token = self.assertValidProjectScopedTokenResponse(r)
- self.assertEqual(self.default_domain_user['name'],
- token['bind']['kerberos'])
-
- def test_unscoped_bind_with_remote_user(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
- auth_data = self.build_authentication_request(kerberos=self.kerberos)
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- r = self.v3_create_token(auth_data)
- token = self.assertValidUnscopedTokenResponse(r)
- self.assertEqual(self.default_domain_user['name'],
- token['bind']['kerberos'])
-
-
-class TestAuthKerberos(TestAuthExternalDomain):
-
- def config_overrides(self):
- super(TestAuthKerberos, self).config_overrides()
- self.kerberos = True
- self.auth_plugin_config_override(
- methods=['kerberos', 'password', 'token'])
-
-
-class TestAuth(test_v3.RestfulTestCase):
-
- def test_unscoped_token_with_user_id(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.v3_create_token(auth_data)
- self.assertValidUnscopedTokenResponse(r)
-
- def test_unscoped_token_with_user_domain_id(self):
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_id=self.domain['id'],
- password=self.user['password'])
- r = self.v3_create_token(auth_data)
- self.assertValidUnscopedTokenResponse(r)
-
- def test_unscoped_token_with_user_domain_name(self):
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_name=self.domain['name'],
- password=self.user['password'])
- r = self.v3_create_token(auth_data)
- self.assertValidUnscopedTokenResponse(r)
-
- def test_project_id_scoped_token_with_user_id(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidProjectScopedTokenResponse(r)
-
- def _second_project_as_default(self):
- ref = unit.new_project_ref(domain_id=self.domain_id)
- r = self.post('/projects', body={'project': ref})
- project = self.assertValidProjectResponse(r, ref)
-
- # grant the user a role on the project
- self.put(
- '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
- 'user_id': self.user['id'],
- 'project_id': project['id'],
- 'role_id': self.role['id']})
-
- # set the user's preferred project
- body = {'user': {'default_project_id': project['id']}}
- r = self.patch('/users/%(user_id)s' % {
- 'user_id': self.user['id']},
- body=body)
- self.assertValidUserResponse(r)
-
- return project
-
- def test_default_project_id_scoped_token_with_user_id(self):
- project = self._second_project_as_default()
-
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.v3_create_token(auth_data)
- self.assertValidProjectScopedTokenResponse(r)
- self.assertEqual(project['id'], r.result['token']['project']['id'])
-
- def test_default_project_id_scoped_token_with_user_id_no_catalog(self):
- project = self._second_project_as_default()
-
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True)
- self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
- self.assertEqual(project['id'], r.result['token']['project']['id'])
-
- def test_explicit_unscoped_token(self):
- self._second_project_as_default()
-
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- unscoped="unscoped")
- r = self.post('/auth/tokens', body=auth_data, noauth=True)
-
- self.assertIsNone(r.result['token'].get('project'))
- self.assertIsNone(r.result['token'].get('domain'))
- self.assertIsNone(r.result['token'].get('scope'))
-
- def test_implicit_project_id_scoped_token_with_user_id_no_catalog(self):
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True)
- self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
- self.assertEqual(self.project['id'],
- r.result['token']['project']['id'])
-
- def test_auth_catalog_attributes(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.v3_create_token(auth_data)
-
- catalog = r.result['token']['catalog']
- self.assertEqual(1, len(catalog))
- catalog = catalog[0]
-
- self.assertEqual(self.service['id'], catalog['id'])
- self.assertEqual(self.service['name'], catalog['name'])
- self.assertEqual(self.service['type'], catalog['type'])
-
- endpoint = catalog['endpoints']
- self.assertEqual(1, len(endpoint))
- endpoint = endpoint[0]
-
- self.assertEqual(self.endpoint['id'], endpoint['id'])
- self.assertEqual(self.endpoint['interface'], endpoint['interface'])
- self.assertEqual(self.endpoint['region_id'], endpoint['region_id'])
- self.assertEqual(self.endpoint['url'], endpoint['url'])
-
- def _check_disabled_endpoint_result(self, catalog, disabled_endpoint_id):
- endpoints = catalog[0]['endpoints']
- endpoint_ids = [ep['id'] for ep in endpoints]
- self.assertEqual([self.endpoint_id], endpoint_ids)
-
- def test_auth_catalog_disabled_service(self):
- """On authenticate, get a catalog that excludes disabled services."""
- # although the child endpoint is enabled, the service is disabled
- self.assertTrue(self.endpoint['enabled'])
- self.catalog_api.update_service(
- self.endpoint['service_id'], {'enabled': False})
- service = self.catalog_api.get_service(self.endpoint['service_id'])
- self.assertFalse(service['enabled'])
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.v3_create_token(auth_data)
-
- self.assertEqual([], r.result['token']['catalog'])
-
- def test_auth_catalog_disabled_endpoint(self):
- """On authenticate, get a catalog that excludes disabled endpoints."""
- # Create a disabled endpoint that's like the enabled one.
- disabled_endpoint_ref = copy.copy(self.endpoint)
- disabled_endpoint_id = uuid.uuid4().hex
- disabled_endpoint_ref.update({
- 'id': disabled_endpoint_id,
- 'enabled': False,
- 'interface': 'internal'
- })
- self.catalog_api.create_endpoint(disabled_endpoint_id,
- disabled_endpoint_ref)
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.v3_create_token(auth_data)
-
- self._check_disabled_endpoint_result(r.result['token']['catalog'],
- disabled_endpoint_id)
-
- def test_project_id_scoped_token_with_user_id_unauthorized(self):
- project = unit.new_project_ref(domain_id=self.domain_id)
- self.resource_api.create_project(project['id'], project)
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=project['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_user_and_group_roles_scoped_token(self):
- """Test correct roles are returned in scoped token.
-
- Test Plan:
-
- - Create a domain, with 1 project, 2 users (user1 and user2)
- and 2 groups (group1 and group2)
- - Make user1 a member of group1, user2 a member of group2
- - Create 8 roles, assigning them to each of the 8 combinations
- of users/groups on domain/project
- - Get a project scoped token for user1, checking that the right
- two roles are returned (one directly assigned, one by virtue
- of group membership)
- - Repeat this for a domain scoped token
- - Make user1 also a member of group2
- - Get another scoped token making sure the additional role
- shows up
- - User2 is just here as a spoiler, to make sure we don't get
- any roles uniquely assigned to it returned in any of our
- tokens
-
- """
- domainA = unit.new_domain_ref()
- self.resource_api.create_domain(domainA['id'], domainA)
- projectA = unit.new_project_ref(domain_id=domainA['id'])
- self.resource_api.create_project(projectA['id'], projectA)
-
- user1 = unit.create_user(self.identity_api, domain_id=domainA['id'])
-
- user2 = unit.create_user(self.identity_api, domain_id=domainA['id'])
-
- group1 = unit.new_group_ref(domain_id=domainA['id'])
- group1 = self.identity_api.create_group(group1)
-
- group2 = unit.new_group_ref(domain_id=domainA['id'])
- group2 = self.identity_api.create_group(group2)
-
- self.identity_api.add_user_to_group(user1['id'],
- group1['id'])
- self.identity_api.add_user_to_group(user2['id'],
- group2['id'])
-
- # Now create all the roles and assign them
- role_list = []
- for _ in range(8):
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- role_list.append(role)
-
- self.assignment_api.create_grant(role_list[0]['id'],
- user_id=user1['id'],
- domain_id=domainA['id'])
- self.assignment_api.create_grant(role_list[1]['id'],
- user_id=user1['id'],
- project_id=projectA['id'])
- self.assignment_api.create_grant(role_list[2]['id'],
- user_id=user2['id'],
- domain_id=domainA['id'])
- self.assignment_api.create_grant(role_list[3]['id'],
- user_id=user2['id'],
- project_id=projectA['id'])
- self.assignment_api.create_grant(role_list[4]['id'],
- group_id=group1['id'],
- domain_id=domainA['id'])
- self.assignment_api.create_grant(role_list[5]['id'],
- group_id=group1['id'],
- project_id=projectA['id'])
- self.assignment_api.create_grant(role_list[6]['id'],
- group_id=group2['id'],
- domain_id=domainA['id'])
- self.assignment_api.create_grant(role_list[7]['id'],
- group_id=group2['id'],
- project_id=projectA['id'])
-
- # First, get a project scoped token - which should
- # contain the direct user role and the one by virtue
- # of group membership
- auth_data = self.build_authentication_request(
- user_id=user1['id'],
- password=user1['password'],
- project_id=projectA['id'])
- r = self.v3_create_token(auth_data)
- token = self.assertValidScopedTokenResponse(r)
- roles_ids = []
- for ref in token['roles']:
- roles_ids.append(ref['id'])
- self.assertEqual(2, len(token['roles']))
- self.assertIn(role_list[1]['id'], roles_ids)
- self.assertIn(role_list[5]['id'], roles_ids)
-
- # Now the same thing for a domain scoped token
- auth_data = self.build_authentication_request(
- user_id=user1['id'],
- password=user1['password'],
- domain_id=domainA['id'])
- r = self.v3_create_token(auth_data)
- token = self.assertValidScopedTokenResponse(r)
- roles_ids = []
- for ref in token['roles']:
- roles_ids.append(ref['id'])
- self.assertEqual(2, len(token['roles']))
- self.assertIn(role_list[0]['id'], roles_ids)
- self.assertIn(role_list[4]['id'], roles_ids)
-
- # Finally, add user1 to the 2nd group, and get a new
- # scoped token - the extra role should now be included
- # by virtue of the 2nd group
- self.identity_api.add_user_to_group(user1['id'],
- group2['id'])
- auth_data = self.build_authentication_request(
- user_id=user1['id'],
- password=user1['password'],
- project_id=projectA['id'])
- r = self.v3_create_token(auth_data)
- token = self.assertValidScopedTokenResponse(r)
- roles_ids = []
- for ref in token['roles']:
- roles_ids.append(ref['id'])
- self.assertEqual(3, len(token['roles']))
- self.assertIn(role_list[1]['id'], roles_ids)
- self.assertIn(role_list[5]['id'], roles_ids)
- self.assertIn(role_list[7]['id'], roles_ids)
-
- def test_auth_token_cross_domain_group_and_project(self):
- """Verify getting a token in cross domain group/project roles."""
- # create domain, project and group and grant roles to user
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- project1 = unit.new_project_ref(domain_id=domain1['id'])
- self.resource_api.create_project(project1['id'], project1)
- user_foo = unit.create_user(self.identity_api,
- domain_id=test_v3.DEFAULT_DOMAIN_ID)
- role_member = unit.new_role_ref()
- self.role_api.create_role(role_member['id'], role_member)
- role_admin = unit.new_role_ref()
- self.role_api.create_role(role_admin['id'], role_admin)
- role_foo_domain1 = unit.new_role_ref()
- self.role_api.create_role(role_foo_domain1['id'], role_foo_domain1)
- role_group_domain1 = unit.new_role_ref()
- self.role_api.create_role(role_group_domain1['id'], role_group_domain1)
- self.assignment_api.add_user_to_project(project1['id'],
- user_foo['id'])
- new_group = unit.new_group_ref(domain_id=domain1['id'])
- new_group = self.identity_api.create_group(new_group)
- self.identity_api.add_user_to_group(user_foo['id'],
- new_group['id'])
- self.assignment_api.create_grant(
- user_id=user_foo['id'],
- project_id=project1['id'],
- role_id=role_member['id'])
- self.assignment_api.create_grant(
- group_id=new_group['id'],
- project_id=project1['id'],
- role_id=role_admin['id'])
- self.assignment_api.create_grant(
- user_id=user_foo['id'],
- domain_id=domain1['id'],
- role_id=role_foo_domain1['id'])
- self.assignment_api.create_grant(
- group_id=new_group['id'],
- domain_id=domain1['id'],
- role_id=role_group_domain1['id'])
-
- # Get a scoped token for the project
- auth_data = self.build_authentication_request(
- username=user_foo['name'],
- user_domain_id=test_v3.DEFAULT_DOMAIN_ID,
- password=user_foo['password'],
- project_name=project1['name'],
- project_domain_id=domain1['id'])
-
- r = self.v3_create_token(auth_data)
- scoped_token = self.assertValidScopedTokenResponse(r)
- project = scoped_token["project"]
- roles_ids = []
- for ref in scoped_token['roles']:
- roles_ids.append(ref['id'])
- self.assertEqual(project1['id'], project["id"])
- self.assertIn(role_member['id'], roles_ids)
- self.assertIn(role_admin['id'], roles_ids)
- self.assertNotIn(role_foo_domain1['id'], roles_ids)
- self.assertNotIn(role_group_domain1['id'], roles_ids)
-
- def test_project_id_scoped_token_with_user_domain_id(self):
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_id=self.domain['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidProjectScopedTokenResponse(r)
-
- def test_project_id_scoped_token_with_user_domain_name(self):
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_name=self.domain['name'],
- password=self.user['password'],
- project_id=self.project['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidProjectScopedTokenResponse(r)
-
- def test_domain_id_scoped_token_with_user_id(self):
- path = '/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id'])
- self.put(path=path)
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidDomainScopedTokenResponse(r)
-
- def test_domain_id_scoped_token_with_user_domain_id(self):
- path = '/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id'])
- self.put(path=path)
-
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_id=self.domain['id'],
- password=self.user['password'],
- domain_id=self.domain['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidDomainScopedTokenResponse(r)
-
- def test_domain_id_scoped_token_with_user_domain_name(self):
- path = '/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id'])
- self.put(path=path)
-
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_name=self.domain['name'],
- password=self.user['password'],
- domain_id=self.domain['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidDomainScopedTokenResponse(r)
-
- def test_domain_name_scoped_token_with_user_id(self):
- path = '/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id'])
- self.put(path=path)
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_name=self.domain['name'])
- r = self.v3_create_token(auth_data)
- self.assertValidDomainScopedTokenResponse(r)
-
- def test_domain_name_scoped_token_with_user_domain_id(self):
- path = '/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id'])
- self.put(path=path)
-
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_id=self.domain['id'],
- password=self.user['password'],
- domain_name=self.domain['name'])
- r = self.v3_create_token(auth_data)
- self.assertValidDomainScopedTokenResponse(r)
-
- def test_domain_name_scoped_token_with_user_domain_name(self):
- path = '/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id'])
- self.put(path=path)
-
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_name=self.domain['name'],
- password=self.user['password'],
- domain_name=self.domain['name'])
- r = self.v3_create_token(auth_data)
- self.assertValidDomainScopedTokenResponse(r)
-
- def test_domain_scope_token_with_group_role(self):
- group = unit.new_group_ref(domain_id=self.domain_id)
- group = self.identity_api.create_group(group)
-
- # add user to group
- self.identity_api.add_user_to_group(self.user['id'], group['id'])
-
- # grant the domain role to group
- path = '/domains/%s/groups/%s/roles/%s' % (
- self.domain['id'], group['id'], self.role['id'])
- self.put(path=path)
-
- # now get a domain-scoped token
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidDomainScopedTokenResponse(r)
-
- def test_domain_scope_token_with_name(self):
- # grant the domain role to user
- path = '/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id'])
- self.put(path=path)
- # now get a domain-scoped token
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_name=self.domain['name'])
- r = self.v3_create_token(auth_data)
- self.assertValidDomainScopedTokenResponse(r)
-
- def test_domain_scope_failed(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_auth_with_id(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.v3_create_token(auth_data)
- self.assertValidUnscopedTokenResponse(r)
-
- token = r.headers.get('X-Subject-Token')
-
- # test token auth
- auth_data = self.build_authentication_request(token=token)
- r = self.v3_create_token(auth_data)
- self.assertValidUnscopedTokenResponse(r)
-
- def get_v2_token(self, tenant_id=None):
- body = {
- 'auth': {
- 'passwordCredentials': {
- 'username': self.default_domain_user['name'],
- 'password': self.default_domain_user['password'],
- },
- },
- }
- r = self.admin_request(method='POST', path='/v2.0/tokens', body=body)
- return r
-
- def test_validate_v2_unscoped_token_with_v3_api(self):
- v2_token = self.get_v2_token().result['access']['token']['id']
- auth_data = self.build_authentication_request(token=v2_token)
- r = self.v3_create_token(auth_data)
- self.assertValidUnscopedTokenResponse(r)
-
- def test_validate_v2_scoped_token_with_v3_api(self):
- v2_response = self.get_v2_token(
- tenant_id=self.default_domain_project['id'])
- result = v2_response.result
- v2_token = result['access']['token']['id']
- auth_data = self.build_authentication_request(
- token=v2_token,
- project_id=self.default_domain_project['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidScopedTokenResponse(r)
-
- def test_invalid_user_id(self):
- auth_data = self.build_authentication_request(
- user_id=uuid.uuid4().hex,
- password=self.user['password'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_invalid_user_name(self):
- auth_data = self.build_authentication_request(
- username=uuid.uuid4().hex,
- user_domain_id=self.domain['id'],
- password=self.user['password'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_invalid_domain_id(self):
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_id=uuid.uuid4().hex,
- password=self.user['password'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_invalid_domain_name(self):
- auth_data = self.build_authentication_request(
- username=self.user['name'],
- user_domain_name=uuid.uuid4().hex,
- password=self.user['password'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_invalid_password(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=uuid.uuid4().hex)
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_remote_user_no_realm(self):
- api = auth.controllers.Auth()
- context, auth_info, auth_context = self.build_external_auth_request(
- self.default_domain_user['name'])
- api.authenticate(context, auth_info, auth_context)
- self.assertEqual(self.default_domain_user['id'],
- auth_context['user_id'])
- # Now test to make sure the user name can, itself, contain the
- # '@' character.
- user = {'name': 'myname@mydivision'}
- self.identity_api.update_user(self.default_domain_user['id'], user)
- context, auth_info, auth_context = self.build_external_auth_request(
- user["name"])
- api.authenticate(context, auth_info, auth_context)
- self.assertEqual(self.default_domain_user['id'],
- auth_context['user_id'])
-
- def test_remote_user_no_domain(self):
- api = auth.controllers.Auth()
- context, auth_info, auth_context = self.build_external_auth_request(
- self.user['name'])
- self.assertRaises(exception.Unauthorized,
- api.authenticate,
- context,
- auth_info,
- auth_context)
-
- def test_remote_user_and_password(self):
- # both REMOTE_USER and password methods must pass.
- # note that they do not have to match
- api = auth.controllers.Auth()
- auth_data = self.build_authentication_request(
- user_domain_id=self.default_domain_user['domain_id'],
- username=self.default_domain_user['name'],
- password=self.default_domain_user['password'])['auth']
- context, auth_info, auth_context = self.build_external_auth_request(
- self.default_domain_user['name'], auth_data=auth_data)
-
- api.authenticate(context, auth_info, auth_context)
-
- def test_remote_user_and_explicit_external(self):
- # both REMOTE_USER and password methods must pass.
- # note that they do not have to match
- auth_data = self.build_authentication_request(
- user_domain_id=self.domain['id'],
- username=self.user['name'],
- password=self.user['password'])['auth']
- auth_data['identity']['methods'] = ["password", "external"]
- auth_data['identity']['external'] = {}
- api = auth.controllers.Auth()
- auth_info = auth.controllers.AuthInfo(None, auth_data)
- auth_context = {'extras': {}, 'method_names': []}
- self.assertRaises(exception.Unauthorized,
- api.authenticate,
- self.empty_context,
- auth_info,
- auth_context)
-
- def test_remote_user_bad_password(self):
- # both REMOTE_USER and password methods must pass.
- api = auth.controllers.Auth()
- auth_data = self.build_authentication_request(
- user_domain_id=self.domain['id'],
- username=self.user['name'],
- password='badpassword')['auth']
- context, auth_info, auth_context = self.build_external_auth_request(
- self.default_domain_user['name'], auth_data=auth_data)
- self.assertRaises(exception.Unauthorized,
- api.authenticate,
- context,
- auth_info,
- auth_context)
-
- def test_bind_not_set_with_remote_user(self):
- self.config_fixture.config(group='token', bind=[])
- auth_data = self.build_authentication_request()
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- r = self.v3_create_token(auth_data)
- token = self.assertValidUnscopedTokenResponse(r)
- self.assertNotIn('bind', token)
-
- # TODO(ayoung): move to TestPKITokenAPIs; it will be run for both formats
- def test_verify_with_bound_token(self):
- self.config_fixture.config(group='token', bind='kerberos')
- auth_data = self.build_authentication_request(
- project_id=self.project['id'])
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
-
- token = self.get_requested_token(auth_data)
- headers = {'X-Subject-Token': token}
- r = self.get('/auth/tokens', headers=headers, token=token)
- token = self.assertValidProjectScopedTokenResponse(r)
- self.assertEqual(self.default_domain_user['name'],
- token['bind']['kerberos'])
-
- def test_auth_with_bind_token(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
-
- auth_data = self.build_authentication_request()
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- r = self.v3_create_token(auth_data)
-
- # the unscoped token should have bind information in it
- token = self.assertValidUnscopedTokenResponse(r)
- self.assertEqual(remote_user, token['bind']['kerberos'])
-
- token = r.headers.get('X-Subject-Token')
-
- # using unscoped token with remote user succeeds
- auth_params = {'token': token, 'project_id': self.project_id}
- auth_data = self.build_authentication_request(**auth_params)
- r = self.v3_create_token(auth_data)
- token = self.assertValidProjectScopedTokenResponse(r)
-
- # the bind information should be carried over from the original token
- self.assertEqual(remote_user, token['bind']['kerberos'])
-
- def test_v2_v3_bind_token_intermix(self):
- self.config_fixture.config(group='token', bind='kerberos')
-
- # we need our own user registered to the default domain because of
- # the way external auth works.
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- body = {'auth': {}}
- resp = self.admin_request(path='/v2.0/tokens',
- method='POST',
- body=body)
-
- v2_token_data = resp.result
-
- bind = v2_token_data['access']['token']['bind']
- self.assertEqual(self.default_domain_user['name'], bind['kerberos'])
-
- v2_token_id = v2_token_data['access']['token']['id']
- # NOTE(gyee): self.get() will try to obtain an auth token if one
- # is not provided. When REMOTE_USER is present in the request
- # environment, the external user auth plugin is used in conjunction
- # with the password auth for the admin user. Therefore, we need to
- # cleanup the REMOTE_USER information from the previous call.
- del self.admin_app.extra_environ['REMOTE_USER']
- headers = {'X-Subject-Token': v2_token_id}
- resp = self.get('/auth/tokens', headers=headers)
- token_data = resp.result
-
- self.assertDictEqual(v2_token_data['access']['token']['bind'],
- token_data['token']['bind'])
-
- def test_authenticating_a_user_with_no_password(self):
- user = unit.new_user_ref(domain_id=self.domain['id'])
- del user['password'] # can't have a password for this test
- user = self.identity_api.create_user(user)
-
- auth_data = self.build_authentication_request(
- user_id=user['id'],
- password='password')
-
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_disabled_default_project_result_in_unscoped_token(self):
- # create a disabled project to work with
- project = self.create_new_default_project_for_user(
- self.user['id'], self.domain_id, enable_project=False)
-
- # assign a role to user for the new project
- self.assignment_api.add_role_to_user_and_project(self.user['id'],
- project['id'],
- self.role_id)
-
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.v3_create_token(auth_data)
- self.assertValidUnscopedTokenResponse(r)
-
- def test_disabled_default_project_domain_result_in_unscoped_token(self):
- domain_ref = unit.new_domain_ref()
- r = self.post('/domains', body={'domain': domain_ref})
- domain = self.assertValidDomainResponse(r, domain_ref)
-
- project = self.create_new_default_project_for_user(
- self.user['id'], domain['id'])
-
- # assign a role to user for the new project
- self.assignment_api.add_role_to_user_and_project(self.user['id'],
- project['id'],
- self.role_id)
-
- # now disable the project domain
- body = {'domain': {'enabled': False}}
- r = self.patch('/domains/%(domain_id)s' % {'domain_id': domain['id']},
- body=body)
- self.assertValidDomainResponse(r)
-
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.v3_create_token(auth_data)
- self.assertValidUnscopedTokenResponse(r)
-
- def test_no_access_to_default_project_result_in_unscoped_token(self):
- # create a disabled project to work with
- self.create_new_default_project_for_user(self.user['id'],
- self.domain_id)
-
- # attempt to authenticate without requesting a project
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- r = self.v3_create_token(auth_data)
- self.assertValidUnscopedTokenResponse(r)
-
- def test_disabled_scope_project_domain_result_in_401(self):
- # create a disabled domain
- domain = unit.new_domain_ref()
- domain = self.resource_api.create_domain(domain['id'], domain)
-
- # create a project in the domain
- project = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project['id'], project)
-
- # assign some role to self.user for the project in the domain
- self.assignment_api.add_role_to_user_and_project(
- self.user['id'],
- project['id'],
- self.role_id)
-
- # Disable the domain
- domain['enabled'] = False
- self.resource_api.update_domain(domain['id'], domain)
-
- # user should not be able to auth with project_id
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=project['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- # user should not be able to auth with project_name & domain
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_name=project['name'],
- project_domain_id=domain['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_auth_methods_with_different_identities_fails(self):
- # get the token for a user. This is self.user which is different from
- # self.default_domain_user.
- token = self.get_scoped_token()
- # try both password and token methods with different identities and it
- # should fail
- auth_data = self.build_authentication_request(
- token=token,
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_authenticate_fails_if_project_unsafe(self):
- """Verify authenticate to a project with unsafe name fails."""
- # Start with url name restrictions off, so we can create the unsafe
- # named project
- self.config_fixture.config(group='resource',
- project_name_url_safe='off')
- unsafe_name = 'i am not / safe'
- project = unit.new_project_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID,
- name=unsafe_name)
- self.resource_api.create_project(project['id'], project)
- role_member = unit.new_role_ref()
- self.role_api.create_role(role_member['id'], role_member)
- self.assignment_api.add_role_to_user_and_project(
- self.user['id'], project['id'], role_member['id'])
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_name=project['name'],
- project_domain_id=test_v3.DEFAULT_DOMAIN_ID)
-
- # Since name url restriction is off, we should be able to autenticate
- self.v3_create_token(auth_data)
-
- # Set the name url restriction to new, which should still allow us to
- # authenticate
- self.config_fixture.config(group='resource',
- project_name_url_safe='new')
- self.v3_create_token(auth_data)
-
- # Set the name url restriction to strict and we should fail to
- # authenticate
- self.config_fixture.config(group='resource',
- project_name_url_safe='strict')
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_authenticate_fails_if_domain_unsafe(self):
- """Verify authenticate to a domain with unsafe name fails."""
- # Start with url name restrictions off, so we can create the unsafe
- # named domain
- self.config_fixture.config(group='resource',
- domain_name_url_safe='off')
- unsafe_name = 'i am not / safe'
- domain = unit.new_domain_ref(name=unsafe_name)
- self.resource_api.create_domain(domain['id'], domain)
- role_member = unit.new_role_ref()
- self.role_api.create_role(role_member['id'], role_member)
- self.assignment_api.create_grant(
- role_member['id'],
- user_id=self.user['id'],
- domain_id=domain['id'])
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_name=domain['name'])
-
- # Since name url restriction is off, we should be able to autenticate
- self.v3_create_token(auth_data)
-
- # Set the name url restriction to new, which should still allow us to
- # authenticate
- self.config_fixture.config(group='resource',
- project_name_url_safe='new')
- self.v3_create_token(auth_data)
-
- # Set the name url restriction to strict and we should fail to
- # authenticate
- self.config_fixture.config(group='resource',
- domain_name_url_safe='strict')
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_authenticate_fails_to_project_if_domain_unsafe(self):
- """Verify authenticate to a project using unsafe domain name fails."""
- # Start with url name restrictions off, so we can create the unsafe
- # named domain
- self.config_fixture.config(group='resource',
- domain_name_url_safe='off')
- unsafe_name = 'i am not / safe'
- domain = unit.new_domain_ref(name=unsafe_name)
- self.resource_api.create_domain(domain['id'], domain)
- # Add a (safely named) project to that domain
- project = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project['id'], project)
- role_member = unit.new_role_ref()
- self.role_api.create_role(role_member['id'], role_member)
- self.assignment_api.create_grant(
- role_member['id'],
- user_id=self.user['id'],
- project_id=project['id'])
-
- # An auth request via project ID, but specifying domain by name
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_name=project['name'],
- project_domain_name=domain['name'])
-
- # Since name url restriction is off, we should be able to autenticate
- self.v3_create_token(auth_data)
-
- # Set the name url restriction to new, which should still allow us to
- # authenticate
- self.config_fixture.config(group='resource',
- project_name_url_safe='new')
- self.v3_create_token(auth_data)
-
- # Set the name url restriction to strict and we should fail to
- # authenticate
- self.config_fixture.config(group='resource',
- domain_name_url_safe='strict')
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
-
-class TestAuthJSONExternal(test_v3.RestfulTestCase):
- content_type = 'json'
-
- def auth_plugin_config_override(self, methods=None, **method_classes):
- self.config_fixture.config(group='auth', methods=[])
-
- def test_remote_user_no_method(self):
- api = auth.controllers.Auth()
- context, auth_info, auth_context = self.build_external_auth_request(
- self.default_domain_user['name'])
- self.assertRaises(exception.Unauthorized,
- api.authenticate,
- context,
- auth_info,
- auth_context)
-
-
-class TestTrustOptional(test_v3.RestfulTestCase):
- def config_overrides(self):
- super(TestTrustOptional, self).config_overrides()
- self.config_fixture.config(group='trust', enabled=False)
-
- def test_trusts_returns_not_found(self):
- self.get('/OS-TRUST/trusts', body={'trust': {}},
- expected_status=http_client.NOT_FOUND)
- self.post('/OS-TRUST/trusts', body={'trust': {}},
- expected_status=http_client.NOT_FOUND)
-
- def test_auth_with_scope_in_trust_forbidden(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- trust_id=uuid.uuid4().hex)
- self.v3_create_token(auth_data,
- expected_status=http_client.FORBIDDEN)
-
-
-class TrustAPIBehavior(test_v3.RestfulTestCase):
- """Redelegation valid and secure
-
- Redelegation is a hierarchical structure of trusts between initial trustor
- and a group of users allowed to impersonate trustor and act in his name.
- Hierarchy is created in a process of trusting already trusted permissions
- and organized as an adjacency list using 'redelegated_trust_id' field.
- Redelegation is valid if each subsequent trust in a chain passes 'not more'
- permissions than being redelegated.
-
- Trust constraints are:
- * roles - set of roles trusted by trustor
- * expiration_time
- * allow_redelegation - a flag
- * redelegation_count - decreasing value restricting length of trust chain
- * remaining_uses - DISALLOWED when allow_redelegation == True
-
- Trust becomes invalid in case:
- * trust roles were revoked from trustor
- * one of the users in the delegation chain was disabled or deleted
- * expiration time passed
- * one of the parent trusts has become invalid
- * one of the parent trusts was deleted
-
- """
-
- def config_overrides(self):
- super(TrustAPIBehavior, self).config_overrides()
- self.config_fixture.config(
- group='trust',
- enabled=True,
- allow_redelegation=True,
- max_redelegation_count=10
- )
-
- def setUp(self):
- super(TrustAPIBehavior, self).setUp()
- # Create a trustee to delegate stuff to
- self.trustee_user = unit.create_user(self.identity_api,
- domain_id=self.domain_id)
-
- # trustor->trustee
- self.redelegated_trust_ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id],
- allow_redelegation=True)
-
- # trustor->trustee (no redelegation)
- self.chained_trust_ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=True,
- role_ids=[self.role_id],
- allow_redelegation=True)
-
- def _get_trust_token(self, trust):
- trust_id = trust['id']
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust_id)
- trust_token = self.get_requested_token(auth_data)
- return trust_token
-
- def test_depleted_redelegation_count_error(self):
- self.redelegated_trust_ref['redelegation_count'] = 0
- r = self.post('/OS-TRUST/trusts',
- body={'trust': self.redelegated_trust_ref})
- trust = self.assertValidTrustResponse(r)
- trust_token = self._get_trust_token(trust)
-
- # Attempt to create a redelegated trust.
- self.post('/OS-TRUST/trusts',
- body={'trust': self.chained_trust_ref},
- token=trust_token,
- expected_status=http_client.FORBIDDEN)
-
- def test_modified_redelegation_count_error(self):
- r = self.post('/OS-TRUST/trusts',
- body={'trust': self.redelegated_trust_ref})
- trust = self.assertValidTrustResponse(r)
- trust_token = self._get_trust_token(trust)
-
- # Attempt to create a redelegated trust with incorrect
- # redelegation_count.
- correct = trust['redelegation_count'] - 1
- incorrect = correct - 1
- self.chained_trust_ref['redelegation_count'] = incorrect
- self.post('/OS-TRUST/trusts',
- body={'trust': self.chained_trust_ref},
- token=trust_token,
- expected_status=http_client.FORBIDDEN)
-
- def test_max_redelegation_count_constraint(self):
- incorrect = CONF.trust.max_redelegation_count + 1
- self.redelegated_trust_ref['redelegation_count'] = incorrect
- self.post('/OS-TRUST/trusts',
- body={'trust': self.redelegated_trust_ref},
- expected_status=http_client.FORBIDDEN)
-
- def test_redelegation_expiry(self):
- r = self.post('/OS-TRUST/trusts',
- body={'trust': self.redelegated_trust_ref})
- trust = self.assertValidTrustResponse(r)
- trust_token = self._get_trust_token(trust)
-
- # Attempt to create a redelegated trust supposed to last longer
- # than the parent trust: let's give it 10 minutes (>1 minute).
- too_long_live_chained_trust_ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=10),
- role_ids=[self.role_id])
- self.post('/OS-TRUST/trusts',
- body={'trust': too_long_live_chained_trust_ref},
- token=trust_token,
- expected_status=http_client.FORBIDDEN)
-
- def test_redelegation_remaining_uses(self):
- r = self.post('/OS-TRUST/trusts',
- body={'trust': self.redelegated_trust_ref})
- trust = self.assertValidTrustResponse(r)
- trust_token = self._get_trust_token(trust)
-
- # Attempt to create a redelegated trust with remaining_uses defined.
- # It must fail according to specification: remaining_uses must be
- # omitted for trust redelegation. Any number here.
- self.chained_trust_ref['remaining_uses'] = 5
- self.post('/OS-TRUST/trusts',
- body={'trust': self.chained_trust_ref},
- token=trust_token,
- expected_status=http_client.BAD_REQUEST)
-
- def test_roles_subset(self):
- # Build second role
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- # assign a new role to the user
- self.assignment_api.create_grant(role_id=role['id'],
- user_id=self.user_id,
- project_id=self.project_id)
-
- # Create first trust with extended set of roles
- ref = self.redelegated_trust_ref
- ref['expires_at'] = datetime.datetime.utcnow().replace(
- year=2032).strftime(unit.TIME_FORMAT)
- ref['roles'].append({'id': role['id']})
- r = self.post('/OS-TRUST/trusts',
- body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
- # Trust created with exact set of roles (checked by role id)
- role_id_set = set(r['id'] for r in ref['roles'])
- trust_role_id_set = set(r['id'] for r in trust['roles'])
- self.assertEqual(role_id_set, trust_role_id_set)
-
- trust_token = self._get_trust_token(trust)
-
- # Chain second trust with roles subset
- self.chained_trust_ref['expires_at'] = (
- datetime.datetime.utcnow().replace(year=2028).strftime(
- unit.TIME_FORMAT))
- r = self.post('/OS-TRUST/trusts',
- body={'trust': self.chained_trust_ref},
- token=trust_token)
- trust2 = self.assertValidTrustResponse(r)
- # First trust contains roles superset
- # Second trust contains roles subset
- role_id_set1 = set(r['id'] for r in trust['roles'])
- role_id_set2 = set(r['id'] for r in trust2['roles'])
- self.assertThat(role_id_set1, matchers.GreaterThan(role_id_set2))
-
- def test_redelegate_with_role_by_name(self):
- # For role by name testing
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_names=[self.role['name']],
- allow_redelegation=True)
- ref['expires_at'] = datetime.datetime.utcnow().replace(
- year=2032).strftime(unit.TIME_FORMAT)
- r = self.post('/OS-TRUST/trusts',
- body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
- # Ensure we can get a token with this trust
- trust_token = self._get_trust_token(trust)
- # Chain second trust with roles subset
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=True,
- role_names=[self.role['name']],
- allow_redelegation=True)
- ref['expires_at'] = datetime.datetime.utcnow().replace(
- year=2028).strftime(unit.TIME_FORMAT)
- r = self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- token=trust_token)
- trust = self.assertValidTrustResponse(r)
- # Ensure we can get a token with this trust
- self._get_trust_token(trust)
-
- def test_redelegate_new_role_fails(self):
- r = self.post('/OS-TRUST/trusts',
- body={'trust': self.redelegated_trust_ref})
- trust = self.assertValidTrustResponse(r)
- trust_token = self._get_trust_token(trust)
-
- # Build second trust with a role not in parent's roles
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
- # assign a new role to the user
- self.assignment_api.create_grant(role_id=role['id'],
- user_id=self.user_id,
- project_id=self.project_id)
-
- # Try to chain a trust with the role not from parent trust
- self.chained_trust_ref['roles'] = [{'id': role['id']}]
-
- # Bypass policy enforcement
- with mock.patch.object(rules, 'enforce', return_value=True):
- self.post('/OS-TRUST/trusts',
- body={'trust': self.chained_trust_ref},
- token=trust_token,
- expected_status=http_client.FORBIDDEN)
-
- def test_redelegation_terminator(self):
- self.redelegated_trust_ref['expires_at'] = (
- datetime.datetime.utcnow().replace(year=2032).strftime(
- unit.TIME_FORMAT))
- r = self.post('/OS-TRUST/trusts',
- body={'trust': self.redelegated_trust_ref})
- trust = self.assertValidTrustResponse(r)
- trust_token = self._get_trust_token(trust)
-
- # Build second trust - the terminator
- self.chained_trust_ref['expires_at'] = (
- datetime.datetime.utcnow().replace(year=2028).strftime(
- unit.TIME_FORMAT))
- ref = dict(self.chained_trust_ref,
- redelegation_count=1,
- allow_redelegation=False)
-
- r = self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- token=trust_token)
-
- trust = self.assertValidTrustResponse(r)
- # Check that allow_redelegation == False caused redelegation_count
- # to be set to 0, while allow_redelegation is removed
- self.assertNotIn('allow_redelegation', trust)
- self.assertEqual(0, trust['redelegation_count'])
- trust_token = self._get_trust_token(trust)
-
- # Build third trust, same as second
- self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- token=trust_token,
- expected_status=http_client.FORBIDDEN)
-
- def test_redelegation_without_impersonation(self):
- # Update trust to not allow impersonation
- self.redelegated_trust_ref['impersonation'] = False
-
- # Create trust
- resp = self.post('/OS-TRUST/trusts',
- body={'trust': self.redelegated_trust_ref},
- expected_status=http_client.CREATED)
- trust = self.assertValidTrustResponse(resp)
-
- # Get trusted token without impersonation
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- trust_token = self.get_requested_token(auth_data)
-
- # Create second user for redelegation
- trustee_user_2 = unit.create_user(self.identity_api,
- domain_id=self.domain_id)
-
- # Trust for redelegation
- trust_ref_2 = unit.new_trust_ref(
- trustor_user_id=self.trustee_user['id'],
- trustee_user_id=trustee_user_2['id'],
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id],
- allow_redelegation=False)
-
- # Creating a second trust should not be allowed since trustor does not
- # have the role to delegate thus returning 404 NOT FOUND.
- resp = self.post('/OS-TRUST/trusts',
- body={'trust': trust_ref_2},
- token=trust_token,
- expected_status=http_client.NOT_FOUND)
-
- def test_create_unscoped_trust(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- self.assertValidTrustResponse(r, ref)
-
- def test_create_trust_no_roles(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id)
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.FORBIDDEN)
-
- def _initialize_test_consume_trust(self, count):
- # Make sure remaining_uses is decremented as we consume the trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- remaining_uses=count,
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- # make sure the trust exists
- trust = self.assertValidTrustResponse(r, ref)
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
- # get a token for the trustee
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'])
- r = self.v3_create_token(auth_data)
- token = r.headers.get('X-Subject-Token')
- # get a trust token, consume one use
- auth_data = self.build_authentication_request(
- token=token,
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data)
- return trust
-
- def test_consume_trust_once(self):
- trust = self._initialize_test_consume_trust(2)
- # check decremented value
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
- trust = r.result.get('trust')
- self.assertIsNotNone(trust)
- self.assertEqual(1, trust['remaining_uses'])
- # FIXME(lbragstad): Assert the role that is returned is the right role.
-
- def test_create_one_time_use_trust(self):
- trust = self._initialize_test_consume_trust(1)
- # No more uses, the trust is made unavailable
- self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- expected_status=http_client.NOT_FOUND)
- # this time we can't get a trust token
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_create_unlimited_use_trust(self):
- # by default trusts are unlimited in terms of tokens that can be
- # generated from them, this test creates such a trust explicitly
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- remaining_uses=None,
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r, ref)
-
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'])
- r = self.v3_create_token(auth_data)
- token = r.headers.get('X-Subject-Token')
- auth_data = self.build_authentication_request(
- token=token,
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data)
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
- trust = r.result.get('trust')
- self.assertIsNone(trust['remaining_uses'])
-
- def test_impersonation_token_cannot_create_new_trust(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
-
- trust_token = self.get_requested_token(auth_data)
-
- # Build second trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- token=trust_token,
- expected_status=http_client.FORBIDDEN)
-
- def test_trust_deleted_grant(self):
- # create a new role
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
-
- grant_url = (
- '/projects/%(project_id)s/users/%(user_id)s/'
- 'roles/%(role_id)s' % {
- 'project_id': self.project_id,
- 'user_id': self.user_id,
- 'role_id': role['id']})
-
- # assign a new role
- self.put(grant_url)
-
- # create a trust that delegates the new role
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[role['id']])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- # delete the grant
- self.delete(grant_url)
-
- # attempt to get a trust token with the deleted grant
- # and ensure it's unauthorized
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data,
- expected_status=http_client.FORBIDDEN)
-
- def test_trust_chained(self):
- """Test that a trust token can't be used to execute another trust.
-
- To do this, we create an A->B->C hierarchy of trusts, then attempt to
- execute the trusts in series (C->B->A).
-
- """
- # create a sub-trustee user
- sub_trustee_user = unit.create_user(
- self.identity_api,
- domain_id=test_v3.DEFAULT_DOMAIN_ID)
- sub_trustee_user_id = sub_trustee_user['id']
-
- # create a new role
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
-
- # assign the new role to trustee
- self.put(
- '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
- 'project_id': self.project_id,
- 'user_id': self.trustee_user['id'],
- 'role_id': role['id']})
-
- # create a trust from trustor -> trustee
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust1 = self.assertValidTrustResponse(r)
-
- # authenticate as trustee so we can create a second trust
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- project_id=self.project_id)
- token = self.get_requested_token(auth_data)
-
- # create a trust from trustee -> sub-trustee
- ref = unit.new_trust_ref(
- trustor_user_id=self.trustee_user['id'],
- trustee_user_id=sub_trustee_user_id,
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[role['id']])
- r = self.post('/OS-TRUST/trusts', token=token, body={'trust': ref})
- trust2 = self.assertValidTrustResponse(r)
-
- # authenticate as sub-trustee and get a trust token
- auth_data = self.build_authentication_request(
- user_id=sub_trustee_user['id'],
- password=sub_trustee_user['password'],
- trust_id=trust2['id'])
- trust_token = self.get_requested_token(auth_data)
-
- # attempt to get the second trust using a trust token
- auth_data = self.build_authentication_request(
- token=trust_token,
- trust_id=trust1['id'])
- r = self.v3_create_token(auth_data,
- expected_status=http_client.FORBIDDEN)
-
- def assertTrustTokensRevoked(self, trust_id):
- revocation_response = self.get('/OS-REVOKE/events')
- revocation_events = revocation_response.json_body['events']
- found = False
- for event in revocation_events:
- if event.get('OS-TRUST:trust_id') == trust_id:
- found = True
- self.assertTrue(found, 'event with trust_id %s not found in list' %
- trust_id)
-
- def test_delete_trust_revokes_tokens(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
- trust_id = trust['id']
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust_id)
- r = self.v3_create_token(auth_data)
- self.assertValidProjectScopedTokenResponse(
- r, self.trustee_user)
- trust_token = r.headers['X-Subject-Token']
- self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': trust_id})
- headers = {'X-Subject-Token': trust_token}
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND)
- self.assertTrustTokensRevoked(trust_id)
-
- def disable_user(self, user):
- user['enabled'] = False
- self.identity_api.update_user(user['id'], user)
-
- def test_trust_get_token_fails_if_trustor_disabled(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
-
- trust = self.assertValidTrustResponse(r, ref)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- self.v3_create_token(auth_data)
-
- self.disable_user(self.user)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.FORBIDDEN)
-
- def test_trust_get_token_fails_if_trustee_disabled(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
-
- trust = self.assertValidTrustResponse(r, ref)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- self.v3_create_token(auth_data)
-
- self.disable_user(self.trustee_user)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_delete_trust(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
-
- trust = self.assertValidTrustResponse(r, ref)
-
- self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': trust['id']})
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_change_password_invalidates_trust_tokens(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data)
-
- self.assertValidProjectScopedTokenResponse(r, self.user)
- trust_token = r.headers.get('X-Subject-Token')
-
- self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
- self.user_id, token=trust_token)
-
- self.assertValidUserResponse(
- self.patch('/users/%s' % self.trustee_user['id'],
- body={'user': {'password': uuid.uuid4().hex}}))
-
- self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
- self.user_id, expected_status=http_client.UNAUTHORIZED,
- token=trust_token)
-
- def test_trustee_can_do_role_ops(self):
- resp = self.post('/OS-TRUST/trusts',
- body={'trust': self.redelegated_trust_ref})
- trust = self.assertValidTrustResponse(resp)
- trust_token = self._get_trust_token(trust)
-
- resp = self.get(
- '/OS-TRUST/trusts/%(trust_id)s/roles' % {
- 'trust_id': trust['id']},
- token=trust_token)
- self.assertValidRoleListResponse(resp, self.role)
-
- self.head(
- '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
- 'trust_id': trust['id'],
- 'role_id': self.role['id']},
- token=trust_token,
- expected_status=http_client.OK)
-
- resp = self.get(
- '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
- 'trust_id': trust['id'],
- 'role_id': self.role['id']},
- token=trust_token)
- self.assertValidRoleResponse(resp, self.role)
-
- def test_do_not_consume_remaining_uses_when_get_token_fails(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user['id'],
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id],
- remaining_uses=3)
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
-
- new_trust = r.result.get('trust')
- trust_id = new_trust.get('id')
- # Pass in another user's ID as the trustee, the result being a failed
- # token authenticate and the remaining_uses of the trust should not be
- # decremented.
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- trust_id=trust_id)
- self.v3_create_token(auth_data,
- expected_status=http_client.FORBIDDEN)
-
- r = self.get('/OS-TRUST/trusts/%s' % trust_id)
- self.assertEqual(3, r.result.get('trust').get('remaining_uses'))
-
-
-class TestTrustChain(test_v3.RestfulTestCase):
-
- def config_overrides(self):
- super(TestTrustChain, self).config_overrides()
- self.config_fixture.config(
- group='trust',
- enabled=True,
- allow_redelegation=True,
- max_redelegation_count=10
- )
-
- def setUp(self):
- super(TestTrustChain, self).setUp()
- """Create a trust chain using redelegation.
-
- A trust chain is a series of trusts that are redelegated. For example,
- self.user_list consists of userA, userB, and userC. The first trust in
- the trust chain is going to be established between self.user and userA,
- call it trustA. Then, userA is going to obtain a trust scoped token
- using trustA, and with that token create a trust between userA and
- userB called trustB. This pattern will continue with userB creating a
- trust with userC.
- So the trust chain should look something like:
- trustA -> trustB -> trustC
- Where:
- self.user is trusting userA with trustA
- userA is trusting userB with trustB
- userB is trusting userC with trustC
-
- """
- self.user_list = list()
- self.trust_chain = list()
- for _ in range(3):
- user = unit.create_user(self.identity_api,
- domain_id=self.domain_id)
- self.user_list.append(user)
-
- # trustor->trustee redelegation with impersonation
- trustee = self.user_list[0]
- trust_ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=trustee['id'],
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id],
- allow_redelegation=True,
- redelegation_count=3)
-
- # Create a trust between self.user and the first user in the list
- r = self.post('/OS-TRUST/trusts',
- body={'trust': trust_ref})
-
- trust = self.assertValidTrustResponse(r)
- auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password'],
- trust_id=trust['id'])
-
- # Generate a trusted token for the first user
- trust_token = self.get_requested_token(auth_data)
- self.trust_chain.append(trust)
-
- # Loop through the user to create a chain of redelegated trust.
- for next_trustee in self.user_list[1:]:
- trust_ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=next_trustee['id'],
- project_id=self.project_id,
- impersonation=True,
- role_ids=[self.role_id],
- allow_redelegation=True)
- r = self.post('/OS-TRUST/trusts',
- body={'trust': trust_ref},
- token=trust_token)
- trust = self.assertValidTrustResponse(r)
- auth_data = self.build_authentication_request(
- user_id=next_trustee['id'],
- password=next_trustee['password'],
- trust_id=trust['id'])
- trust_token = self.get_requested_token(auth_data)
- self.trust_chain.append(trust)
-
- trustee = self.user_list[-1]
- trust = self.trust_chain[-1]
- auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password'],
- trust_id=trust['id'])
-
- self.last_token = self.get_requested_token(auth_data)
-
- def assert_user_authenticate(self, user):
- auth_data = self.build_authentication_request(
- user_id=user['id'],
- password=user['password']
- )
- r = self.v3_create_token(auth_data)
- self.assertValidTokenResponse(r)
-
- def assert_trust_tokens_revoked(self, trust_id):
- trustee = self.user_list[0]
- auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password']
- )
- r = self.v3_create_token(auth_data)
- self.assertValidTokenResponse(r)
-
- revocation_response = self.get('/OS-REVOKE/events')
- revocation_events = revocation_response.json_body['events']
- found = False
- for event in revocation_events:
- if event.get('OS-TRUST:trust_id') == trust_id:
- found = True
- self.assertTrue(found, 'event with trust_id %s not found in list' %
- trust_id)
-
- def test_delete_trust_cascade(self):
- self.assert_user_authenticate(self.user_list[0])
- self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': self.trust_chain[0]['id']})
-
- headers = {'X-Subject-Token': self.last_token}
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND)
- self.assert_trust_tokens_revoked(self.trust_chain[0]['id'])
-
- def test_delete_broken_chain(self):
- self.assert_user_authenticate(self.user_list[0])
- self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': self.trust_chain[0]['id']})
-
- # Verify the two remaining trust have been deleted
- for i in range(len(self.user_list) - 1):
- auth_data = self.build_authentication_request(
- user_id=self.user_list[i]['id'],
- password=self.user_list[i]['password'])
-
- auth_token = self.get_requested_token(auth_data)
-
- # Assert chained trust have been deleted
- self.get('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': self.trust_chain[i + 1]['id']},
- token=auth_token,
- expected_status=http_client.NOT_FOUND)
-
- def test_trustor_roles_revoked(self):
- self.assert_user_authenticate(self.user_list[0])
-
- self.assignment_api.remove_role_from_user_and_project(
- self.user_id, self.project_id, self.role_id
- )
-
- # Verify that users are not allowed to authenticate with trust
- for i in range(len(self.user_list[1:])):
- trustee = self.user_list[i]
- auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password'])
-
- # Attempt to authenticate with trust
- token = self.get_requested_token(auth_data)
- auth_data = self.build_authentication_request(
- token=token,
- trust_id=self.trust_chain[i - 1]['id'])
-
- # Trustee has no delegated roles
- self.v3_create_token(auth_data,
- expected_status=http_client.FORBIDDEN)
-
- def test_intermediate_user_disabled(self):
- self.assert_user_authenticate(self.user_list[0])
-
- disabled = self.user_list[0]
- disabled['enabled'] = False
- self.identity_api.update_user(disabled['id'], disabled)
-
- # Bypass policy enforcement
- with mock.patch.object(rules, 'enforce', return_value=True):
- headers = {'X-Subject-Token': self.last_token}
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.FORBIDDEN)
-
- def test_intermediate_user_deleted(self):
- self.assert_user_authenticate(self.user_list[0])
-
- self.identity_api.delete_user(self.user_list[0]['id'])
-
- # Bypass policy enforcement
- with mock.patch.object(rules, 'enforce', return_value=True):
- headers = {'X-Subject-Token': self.last_token}
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.FORBIDDEN)
-
-
-class TestAPIProtectionWithoutAuthContextMiddleware(test_v3.RestfulTestCase):
- def test_api_protection_with_no_auth_context_in_env(self):
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.project['id'])
- token = self.get_requested_token(auth_data)
- auth_controller = auth.controllers.Auth()
- # all we care is that auth context is not in the environment and
- # 'token_id' is used to build the auth context instead
- context = {'subject_token_id': token,
- 'token_id': token,
- 'query_string': {},
- 'environment': {}}
- r = auth_controller.validate_token(context)
- self.assertEqual(http_client.OK, r.status_code)
-
-
-class TestAuthContext(unit.TestCase):
- def setUp(self):
- super(TestAuthContext, self).setUp()
- self.auth_context = auth.controllers.AuthContext()
-
- def test_pick_lowest_expires_at(self):
- expires_at_1 = utils.isotime(timeutils.utcnow())
- expires_at_2 = utils.isotime(timeutils.utcnow() +
- datetime.timedelta(seconds=10))
- # make sure auth_context picks the lowest value
- self.auth_context['expires_at'] = expires_at_1
- self.auth_context['expires_at'] = expires_at_2
- self.assertEqual(expires_at_1, self.auth_context['expires_at'])
-
- def test_identity_attribute_conflict(self):
- for identity_attr in auth.controllers.AuthContext.IDENTITY_ATTRIBUTES:
- self.auth_context[identity_attr] = uuid.uuid4().hex
- if identity_attr == 'expires_at':
- # 'expires_at' is a special case. Will test it in a separate
- # test case.
- continue
- self.assertRaises(exception.Unauthorized,
- operator.setitem,
- self.auth_context,
- identity_attr,
- uuid.uuid4().hex)
-
- def test_identity_attribute_conflict_with_none_value(self):
- for identity_attr in auth.controllers.AuthContext.IDENTITY_ATTRIBUTES:
- self.auth_context[identity_attr] = None
-
- if identity_attr == 'expires_at':
- # 'expires_at' is a special case and is tested above.
- self.auth_context['expires_at'] = uuid.uuid4().hex
- continue
-
- self.assertRaises(exception.Unauthorized,
- operator.setitem,
- self.auth_context,
- identity_attr,
- uuid.uuid4().hex)
-
- def test_non_identity_attribute_conflict_override(self):
- # for attributes Keystone doesn't know about, make sure they can be
- # freely manipulated
- attr_name = uuid.uuid4().hex
- attr_val_1 = uuid.uuid4().hex
- attr_val_2 = uuid.uuid4().hex
- self.auth_context[attr_name] = attr_val_1
- self.auth_context[attr_name] = attr_val_2
- self.assertEqual(attr_val_2, self.auth_context[attr_name])
-
-
-class TestAuthSpecificData(test_v3.RestfulTestCase):
-
- def test_get_catalog_project_scoped_token(self):
- """Call ``GET /auth/catalog`` with a project-scoped token."""
- r = self.get('/auth/catalog')
- self.assertValidCatalogResponse(r)
-
- def test_get_catalog_domain_scoped_token(self):
- """Call ``GET /auth/catalog`` with a domain-scoped token."""
- # grant a domain role to a user
- self.put(path='/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id']))
-
- self.get(
- '/auth/catalog',
- auth=self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain['id']),
- expected_status=http_client.FORBIDDEN)
-
- def test_get_catalog_unscoped_token(self):
- """Call ``GET /auth/catalog`` with an unscoped token."""
- self.get(
- '/auth/catalog',
- auth=self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password']),
- expected_status=http_client.FORBIDDEN)
-
- def test_get_catalog_no_token(self):
- """Call ``GET /auth/catalog`` without a token."""
- self.get(
- '/auth/catalog',
- noauth=True,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_get_projects_project_scoped_token(self):
- r = self.get('/auth/projects')
- self.assertThat(r.json['projects'], matchers.HasLength(1))
- self.assertValidProjectListResponse(r)
-
- def test_get_domains_project_scoped_token(self):
- self.put(path='/domains/%s/users/%s/roles/%s' % (
- self.domain['id'], self.user['id'], self.role['id']))
-
- r = self.get('/auth/domains')
- self.assertThat(r.json['domains'], matchers.HasLength(1))
- self.assertValidDomainListResponse(r)
-
-
-class TestTrustAuthPKITokenProvider(TrustAPIBehavior, TestTrustChain):
- def config_overrides(self):
- super(TestTrustAuthPKITokenProvider, self).config_overrides()
- self.config_fixture.config(group='token',
- provider='pki',
- revoke_by_id=False)
- self.config_fixture.config(group='trust',
- enabled=True)
-
-
-class TestTrustAuthPKIZTokenProvider(TrustAPIBehavior, TestTrustChain):
- def config_overrides(self):
- super(TestTrustAuthPKIZTokenProvider, self).config_overrides()
- self.config_fixture.config(group='token',
- provider='pkiz',
- revoke_by_id=False)
- self.config_fixture.config(group='trust',
- enabled=True)
-
-
-class TestTrustAuthFernetTokenProvider(TrustAPIBehavior, TestTrustChain):
- def config_overrides(self):
- super(TestTrustAuthFernetTokenProvider, self).config_overrides()
- self.config_fixture.config(group='token',
- provider='fernet',
- revoke_by_id=False)
- self.config_fixture.config(group='trust',
- enabled=True)
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
-
-class TestAuthFernetTokenProvider(TestAuth):
- def setUp(self):
- super(TestAuthFernetTokenProvider, self).setUp()
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
- def config_overrides(self):
- super(TestAuthFernetTokenProvider, self).config_overrides()
- self.config_fixture.config(group='token', provider='fernet')
-
- def test_verify_with_bound_token(self):
- self.config_fixture.config(group='token', bind='kerberos')
- auth_data = self.build_authentication_request(
- project_id=self.project['id'])
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- # Bind not current supported by Fernet, see bug 1433311.
- self.v3_create_token(auth_data,
- expected_status=http_client.NOT_IMPLEMENTED)
-
- def test_v2_v3_bind_token_intermix(self):
- self.config_fixture.config(group='token', bind='kerberos')
-
- # we need our own user registered to the default domain because of
- # the way external auth works.
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- body = {'auth': {}}
- # Bind not current supported by Fernet, see bug 1433311.
- self.admin_request(path='/v2.0/tokens',
- method='POST',
- body=body,
- expected_status=http_client.NOT_IMPLEMENTED)
-
- def test_auth_with_bind_token(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
-
- auth_data = self.build_authentication_request()
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- # Bind not current supported by Fernet, see bug 1433311.
- self.v3_create_token(auth_data,
- expected_status=http_client.NOT_IMPLEMENTED)
-
-
-class TestAuthTOTP(test_v3.RestfulTestCase):
-
- def setUp(self):
- super(TestAuthTOTP, self).setUp()
-
- ref = unit.new_totp_credential(
- user_id=self.default_domain_user['id'],
- project_id=self.default_domain_project['id'])
-
- self.secret = ref['blob']
-
- r = self.post('/credentials', body={'credential': ref})
- self.assertValidCredentialResponse(r, ref)
-
- self.addCleanup(self.cleanup)
-
- def auth_plugin_config_override(self):
- methods = ['totp', 'token', 'password']
- super(TestAuthTOTP, self).auth_plugin_config_override(methods)
-
- def _make_credentials(self, cred_type, count=1, user_id=None,
- project_id=None, blob=None):
- user_id = user_id or self.default_domain_user['id']
- project_id = project_id or self.default_domain_project['id']
-
- creds = []
- for __ in range(count):
- if cred_type == 'totp':
- ref = unit.new_totp_credential(
- user_id=user_id, project_id=project_id, blob=blob)
- else:
- ref = unit.new_credential_ref(
- user_id=user_id, project_id=project_id)
- resp = self.post('/credentials', body={'credential': ref})
- creds.append(resp.json['credential'])
- return creds
-
- def _make_auth_data_by_id(self, passcode, user_id=None):
- return self.build_authentication_request(
- user_id=user_id or self.default_domain_user['id'],
- passcode=passcode,
- project_id=self.project['id'])
-
- def _make_auth_data_by_name(self, passcode, username, user_domain_id):
- return self.build_authentication_request(
- username=username,
- user_domain_id=user_domain_id,
- passcode=passcode,
- project_id=self.project['id'])
-
- def cleanup(self):
- totp_creds = self.credential_api.list_credentials_for_user(
- self.default_domain_user['id'], type='totp')
-
- other_creds = self.credential_api.list_credentials_for_user(
- self.default_domain_user['id'], type='other')
-
- for cred in itertools.chain(other_creds, totp_creds):
- self.delete('/credentials/%s' % cred['id'],
- expected_status=http_client.NO_CONTENT)
-
- def test_with_a_valid_passcode(self):
- creds = self._make_credentials('totp')
- secret = creds[-1]['blob']
- auth_data = self._make_auth_data_by_id(
- totp._generate_totp_passcode(secret))
-
- self.v3_create_token(auth_data, expected_status=http_client.CREATED)
-
- def test_with_an_invalid_passcode_and_user_credentials(self):
- self._make_credentials('totp')
- auth_data = self._make_auth_data_by_id('000000')
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_with_an_invalid_passcode_with_no_user_credentials(self):
- auth_data = self._make_auth_data_by_id('000000')
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_with_a_corrupt_totp_credential(self):
- self._make_credentials('totp', count=1, blob='0')
- auth_data = self._make_auth_data_by_id('000000')
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_with_multiple_credentials(self):
- self._make_credentials('other', 3)
- creds = self._make_credentials('totp', count=3)
- secret = creds[-1]['blob']
-
- auth_data = self._make_auth_data_by_id(
- totp._generate_totp_passcode(secret))
- self.v3_create_token(auth_data, expected_status=http_client.CREATED)
-
- def test_with_multiple_users(self):
- # make some credentials for the existing user
- self._make_credentials('totp', count=3)
-
- # create a new user and their credentials
- user = unit.create_user(self.identity_api, domain_id=self.domain_id)
- self.assignment_api.create_grant(self.role['id'],
- user_id=user['id'],
- project_id=self.project['id'])
- creds = self._make_credentials('totp', count=1, user_id=user['id'])
- secret = creds[-1]['blob']
-
- # Stop the clock otherwise there is a chance of auth failure due to
- # getting a different TOTP between the call here and the call in the
- # auth plugin.
- self.useFixture(fixture.TimeFixture())
-
- auth_data = self._make_auth_data_by_id(
- totp._generate_totp_passcode(secret), user_id=user['id'])
- self.v3_create_token(auth_data, expected_status=http_client.CREATED)
-
- def test_with_multiple_users_and_invalid_credentials(self):
- """Prevent logging in with someone else's credentials.
-
- It's very easy to forget to limit the credentials query by user.
- Let's just test it for a sanity check.
- """
- # make some credentials for the existing user
- self._make_credentials('totp', count=3)
-
- # create a new user and their credentials
- new_user = unit.create_user(self.identity_api,
- domain_id=self.domain_id)
- self.assignment_api.create_grant(self.role['id'],
- user_id=new_user['id'],
- project_id=self.project['id'])
- user2_creds = self._make_credentials(
- 'totp', count=1, user_id=new_user['id'])
-
- user_id = self.default_domain_user['id'] # user1
- secret = user2_creds[-1]['blob']
-
- auth_data = self._make_auth_data_by_id(
- totp._generate_totp_passcode(secret), user_id=user_id)
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_with_username_and_domain_id(self):
- creds = self._make_credentials('totp')
- secret = creds[-1]['blob']
- auth_data = self._make_auth_data_by_name(
- totp._generate_totp_passcode(secret),
- username=self.default_domain_user['name'],
- user_domain_id=self.default_domain_user['domain_id'])
-
- self.v3_create_token(auth_data, expected_status=http_client.CREATED)
-
-
-class TestFetchRevocationList(test_v3.RestfulTestCase):
- """Test fetch token revocation list on the v3 Identity API."""
-
- def config_overrides(self):
- super(TestFetchRevocationList, self).config_overrides()
- self.config_fixture.config(group='token', revoke_by_id=True)
-
- def test_ids_no_tokens(self):
- # When there's no revoked tokens the response is an empty list, and
- # the response is signed.
- res = self.get('/auth/tokens/OS-PKI/revoked')
- signed = res.json['signed']
- clear = cms.cms_verify(signed, CONF.signing.certfile,
- CONF.signing.ca_certs)
- payload = json.loads(clear)
- self.assertEqual({'revoked': []}, payload)
-
- def test_ids_token(self):
- # When there's a revoked token, it's in the response, and the response
- # is signed.
- token_res = self.v3_create_token(
- self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id']))
-
- token_id = token_res.headers.get('X-Subject-Token')
- token_data = token_res.json['token']
-
- self.delete('/auth/tokens', headers={'X-Subject-Token': token_id})
-
- res = self.get('/auth/tokens/OS-PKI/revoked')
- signed = res.json['signed']
- clear = cms.cms_verify(signed, CONF.signing.certfile,
- CONF.signing.ca_certs)
- payload = json.loads(clear)
-
- def truncate(ts_str):
- return ts_str[:19] + 'Z' # 2016-01-21T15:53:52 == 19 chars.
-
- exp_token_revoke_data = {
- 'id': token_id,
- 'audit_id': token_data['audit_ids'][0],
- 'expires': truncate(token_data['expires_at']),
- }
-
- self.assertEqual({'revoked': [exp_token_revoke_data]}, payload)
-
- def test_audit_id_only_no_tokens(self):
- # When there's no revoked tokens and ?audit_id_only is used, the
- # response is an empty list and is not signed.
- res = self.get('/auth/tokens/OS-PKI/revoked?audit_id_only')
- self.assertEqual({'revoked': []}, res.json)
-
- def test_audit_id_only_token(self):
- # When there's a revoked token and ?audit_id_only is used, the
- # response contains the audit_id of the token and is not signed.
- token_res = self.v3_create_token(
- self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id']))
-
- token_id = token_res.headers.get('X-Subject-Token')
- token_data = token_res.json['token']
-
- self.delete('/auth/tokens', headers={'X-Subject-Token': token_id})
-
- res = self.get('/auth/tokens/OS-PKI/revoked?audit_id_only')
-
- def truncate(ts_str):
- return ts_str[:19] + 'Z' # 2016-01-21T15:53:52 == 19 chars.
-
- exp_token_revoke_data = {
- 'audit_id': token_data['audit_ids'][0],
- 'expires': truncate(token_data['expires_at']),
- }
-
- self.assertEqual({'revoked': [exp_token_revoke_data]}, res.json)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_catalog.py b/keystone-moon/keystone/tests/unit/test_v3_catalog.py
deleted file mode 100644
index 2eb9db14..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_catalog.py
+++ /dev/null
@@ -1,924 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-from six.moves import http_client
-from testtools import matchers
-
-from keystone import catalog
-from keystone.tests import unit
-from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit import test_v3
-
-
-class CatalogTestCase(test_v3.RestfulTestCase):
- """Test service & endpoint CRUD."""
-
- # region crud tests
-
- def test_create_region_with_id(self):
- """Call ``PUT /regions/{region_id}`` w/o an ID in the request body."""
- ref = unit.new_region_ref()
- region_id = ref.pop('id')
- r = self.put(
- '/regions/%s' % region_id,
- body={'region': ref},
- expected_status=http_client.CREATED)
- self.assertValidRegionResponse(r, ref)
- # Double-check that the region ID was kept as-is and not
- # populated with a UUID, as is the case with POST /v3/regions
- self.assertEqual(region_id, r.json['region']['id'])
-
- def test_create_region_with_matching_ids(self):
- """Call ``PUT /regions/{region_id}`` with an ID in the request body."""
- ref = unit.new_region_ref()
- region_id = ref['id']
- r = self.put(
- '/regions/%s' % region_id,
- body={'region': ref},
- expected_status=http_client.CREATED)
- self.assertValidRegionResponse(r, ref)
- # Double-check that the region ID was kept as-is and not
- # populated with a UUID, as is the case with POST /v3/regions
- self.assertEqual(region_id, r.json['region']['id'])
-
- def test_create_region_with_duplicate_id(self):
- """Call ``PUT /regions/{region_id}``."""
- ref = dict(description="my region")
- self.put(
- '/regions/myregion',
- body={'region': ref}, expected_status=http_client.CREATED)
- # Create region again with duplicate id
- self.put(
- '/regions/myregion',
- body={'region': ref}, expected_status=http_client.CONFLICT)
-
- def test_create_region(self):
- """Call ``POST /regions`` with an ID in the request body."""
- # the ref will have an ID defined on it
- ref = unit.new_region_ref()
- r = self.post(
- '/regions',
- body={'region': ref})
- self.assertValidRegionResponse(r, ref)
-
- # we should be able to get the region, having defined the ID ourselves
- r = self.get(
- '/regions/%(region_id)s' % {
- 'region_id': ref['id']})
- self.assertValidRegionResponse(r, ref)
-
- def test_create_region_with_empty_id(self):
- """Call ``POST /regions`` with an empty ID in the request body."""
- ref = unit.new_region_ref(id='')
-
- r = self.post('/regions', body={'region': ref})
- self.assertValidRegionResponse(r, ref)
- self.assertNotEmpty(r.result['region'].get('id'))
-
- def test_create_region_without_id(self):
- """Call ``POST /regions`` without an ID in the request body."""
- ref = unit.new_region_ref()
-
- # instead of defining the ID ourselves...
- del ref['id']
-
- # let the service define the ID
- r = self.post('/regions', body={'region': ref})
- self.assertValidRegionResponse(r, ref)
-
- def test_create_region_without_description(self):
- """Call ``POST /regions`` without description in the request body."""
- ref = unit.new_region_ref(description=None)
-
- del ref['description']
-
- r = self.post('/regions', body={'region': ref})
- # Create the description in the reference to compare to since the
- # response should now have a description, even though we didn't send
- # it with the original reference.
- ref['description'] = ''
- self.assertValidRegionResponse(r, ref)
-
- def test_create_regions_with_same_description_string(self):
- """Call ``POST /regions`` with duplicate descriptions."""
- # NOTE(lbragstad): Make sure we can create two regions that have the
- # same description.
- region_desc = 'Some Region Description'
-
- ref1 = unit.new_region_ref(description=region_desc)
- ref2 = unit.new_region_ref(description=region_desc)
-
- resp1 = self.post('/regions', body={'region': ref1})
- self.assertValidRegionResponse(resp1, ref1)
-
- resp2 = self.post('/regions', body={'region': ref2})
- self.assertValidRegionResponse(resp2, ref2)
-
- def test_create_regions_without_descriptions(self):
- """Call ``POST /regions`` with no description."""
- # NOTE(lbragstad): Make sure we can create two regions that have
- # no description in the request body. The description should be
- # populated by Catalog Manager.
- ref1 = unit.new_region_ref()
- ref2 = unit.new_region_ref()
-
- del ref1['description']
- ref2['description'] = None
-
- resp1 = self.post('/regions', body={'region': ref1})
-
- resp2 = self.post('/regions', body={'region': ref2})
- # Create the descriptions in the references to compare to since the
- # responses should now have descriptions, even though we didn't send
- # a description with the original references.
- ref1['description'] = ''
- ref2['description'] = ''
- self.assertValidRegionResponse(resp1, ref1)
- self.assertValidRegionResponse(resp2, ref2)
-
- def test_create_region_with_conflicting_ids(self):
- """Call ``PUT /regions/{region_id}`` with conflicting region IDs."""
- # the region ref is created with an ID
- ref = unit.new_region_ref()
-
- # but instead of using that ID, make up a new, conflicting one
- self.put(
- '/regions/%s' % uuid.uuid4().hex,
- body={'region': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_list_regions(self):
- """Call ``GET /regions``."""
- r = self.get('/regions')
- self.assertValidRegionListResponse(r, ref=self.region)
-
- def _create_region_with_parent_id(self, parent_id=None):
- ref = unit.new_region_ref(parent_region_id=parent_id)
- return self.post(
- '/regions',
- body={'region': ref})
-
- def test_list_regions_filtered_by_parent_region_id(self):
- """Call ``GET /regions?parent_region_id={parent_region_id}``."""
- new_region = self._create_region_with_parent_id()
- parent_id = new_region.result['region']['id']
-
- new_region = self._create_region_with_parent_id(parent_id)
- new_region = self._create_region_with_parent_id(parent_id)
-
- r = self.get('/regions?parent_region_id=%s' % parent_id)
-
- for region in r.result['regions']:
- self.assertEqual(parent_id, region['parent_region_id'])
-
- def test_get_region(self):
- """Call ``GET /regions/{region_id}``."""
- r = self.get('/regions/%(region_id)s' % {
- 'region_id': self.region_id})
- self.assertValidRegionResponse(r, self.region)
-
- def test_update_region(self):
- """Call ``PATCH /regions/{region_id}``."""
- region = unit.new_region_ref()
- del region['id']
- r = self.patch('/regions/%(region_id)s' % {
- 'region_id': self.region_id},
- body={'region': region})
- self.assertValidRegionResponse(r, region)
-
- def test_update_region_without_description_keeps_original(self):
- """Call ``PATCH /regions/{region_id}``."""
- region_ref = unit.new_region_ref()
-
- resp = self.post('/regions', body={'region': region_ref})
-
- region_updates = {
- # update with something that's not the description
- 'parent_region_id': self.region_id,
- }
- resp = self.patch('/regions/%s' % region_ref['id'],
- body={'region': region_updates})
-
- # NOTE(dstanek): Keystone should keep the original description.
- self.assertEqual(region_ref['description'],
- resp.result['region']['description'])
-
- def test_update_region_with_null_description(self):
- """Call ``PATCH /regions/{region_id}``."""
- region = unit.new_region_ref(description=None)
- del region['id']
- r = self.patch('/regions/%(region_id)s' % {
- 'region_id': self.region_id},
- body={'region': region})
-
- # NOTE(dstanek): Keystone should turn the provided None value into
- # an empty string before storing in the backend.
- region['description'] = ''
- self.assertValidRegionResponse(r, region)
-
- def test_delete_region(self):
- """Call ``DELETE /regions/{region_id}``."""
- ref = unit.new_region_ref()
- r = self.post(
- '/regions',
- body={'region': ref})
- self.assertValidRegionResponse(r, ref)
-
- self.delete('/regions/%(region_id)s' % {
- 'region_id': ref['id']})
-
- # service crud tests
-
- def test_create_service(self):
- """Call ``POST /services``."""
- ref = unit.new_service_ref()
- r = self.post(
- '/services',
- body={'service': ref})
- self.assertValidServiceResponse(r, ref)
-
- def test_create_service_no_name(self):
- """Call ``POST /services``."""
- ref = unit.new_service_ref()
- del ref['name']
- r = self.post(
- '/services',
- body={'service': ref})
- ref['name'] = ''
- self.assertValidServiceResponse(r, ref)
-
- def test_create_service_no_enabled(self):
- """Call ``POST /services``."""
- ref = unit.new_service_ref()
- del ref['enabled']
- r = self.post(
- '/services',
- body={'service': ref})
- ref['enabled'] = True
- self.assertValidServiceResponse(r, ref)
- self.assertIs(True, r.result['service']['enabled'])
-
- def test_create_service_enabled_false(self):
- """Call ``POST /services``."""
- ref = unit.new_service_ref(enabled=False)
- r = self.post(
- '/services',
- body={'service': ref})
- self.assertValidServiceResponse(r, ref)
- self.assertIs(False, r.result['service']['enabled'])
-
- def test_create_service_enabled_true(self):
- """Call ``POST /services``."""
- ref = unit.new_service_ref(enabled=True)
- r = self.post(
- '/services',
- body={'service': ref})
- self.assertValidServiceResponse(r, ref)
- self.assertIs(True, r.result['service']['enabled'])
-
- def test_create_service_enabled_str_true(self):
- """Call ``POST /services``."""
- ref = unit.new_service_ref(enabled='True')
- self.post('/services', body={'service': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_service_enabled_str_false(self):
- """Call ``POST /services``."""
- ref = unit.new_service_ref(enabled='False')
- self.post('/services', body={'service': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_service_enabled_str_random(self):
- """Call ``POST /services``."""
- ref = unit.new_service_ref(enabled='puppies')
- self.post('/services', body={'service': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_list_services(self):
- """Call ``GET /services``."""
- r = self.get('/services')
- self.assertValidServiceListResponse(r, ref=self.service)
-
- def _create_random_service(self):
- ref = unit.new_service_ref()
- response = self.post(
- '/services',
- body={'service': ref})
- return response.json['service']
-
- def test_filter_list_services_by_type(self):
- """Call ``GET /services?type=<some type>``."""
- target_ref = self._create_random_service()
-
- # create unrelated services
- self._create_random_service()
- self._create_random_service()
-
- response = self.get('/services?type=' + target_ref['type'])
- self.assertValidServiceListResponse(response, ref=target_ref)
-
- filtered_service_list = response.json['services']
- self.assertEqual(1, len(filtered_service_list))
-
- filtered_service = filtered_service_list[0]
- self.assertEqual(target_ref['type'], filtered_service['type'])
-
- def test_filter_list_services_by_name(self):
- """Call ``GET /services?name=<some name>``."""
- target_ref = self._create_random_service()
-
- # create unrelated services
- self._create_random_service()
- self._create_random_service()
-
- response = self.get('/services?name=' + target_ref['name'])
- self.assertValidServiceListResponse(response, ref=target_ref)
-
- filtered_service_list = response.json['services']
- self.assertEqual(1, len(filtered_service_list))
-
- filtered_service = filtered_service_list[0]
- self.assertEqual(target_ref['name'], filtered_service['name'])
-
- def test_get_service(self):
- """Call ``GET /services/{service_id}``."""
- r = self.get('/services/%(service_id)s' % {
- 'service_id': self.service_id})
- self.assertValidServiceResponse(r, self.service)
-
- def test_update_service(self):
- """Call ``PATCH /services/{service_id}``."""
- service = unit.new_service_ref()
- del service['id']
- r = self.patch('/services/%(service_id)s' % {
- 'service_id': self.service_id},
- body={'service': service})
- self.assertValidServiceResponse(r, service)
-
- def test_delete_service(self):
- """Call ``DELETE /services/{service_id}``."""
- self.delete('/services/%(service_id)s' % {
- 'service_id': self.service_id})
-
- # endpoint crud tests
-
- def test_list_endpoints(self):
- """Call ``GET /endpoints``."""
- r = self.get('/endpoints')
- self.assertValidEndpointListResponse(r, ref=self.endpoint)
-
- def _create_random_endpoint(self, interface='public',
- parent_region_id=None):
- region = self._create_region_with_parent_id(
- parent_id=parent_region_id)
- service = self._create_random_service()
- ref = unit.new_endpoint_ref(
- service_id=service['id'],
- interface=interface,
- region_id=region.result['region']['id'])
-
- response = self.post(
- '/endpoints',
- body={'endpoint': ref})
- return response.json['endpoint']
-
- def test_list_endpoints_filtered_by_interface(self):
- """Call ``GET /endpoints?interface={interface}``."""
- ref = self._create_random_endpoint(interface='internal')
-
- response = self.get('/endpoints?interface=%s' % ref['interface'])
- self.assertValidEndpointListResponse(response, ref=ref)
-
- for endpoint in response.json['endpoints']:
- self.assertEqual(ref['interface'], endpoint['interface'])
-
- def test_list_endpoints_filtered_by_service_id(self):
- """Call ``GET /endpoints?service_id={service_id}``."""
- ref = self._create_random_endpoint()
-
- response = self.get('/endpoints?service_id=%s' % ref['service_id'])
- self.assertValidEndpointListResponse(response, ref=ref)
-
- for endpoint in response.json['endpoints']:
- self.assertEqual(ref['service_id'], endpoint['service_id'])
-
- def test_list_endpoints_filtered_by_region_id(self):
- """Call ``GET /endpoints?region_id={region_id}``."""
- ref = self._create_random_endpoint()
-
- response = self.get('/endpoints?region_id=%s' % ref['region_id'])
- self.assertValidEndpointListResponse(response, ref=ref)
-
- for endpoint in response.json['endpoints']:
- self.assertEqual(ref['region_id'], endpoint['region_id'])
-
- def test_list_endpoints_filtered_by_parent_region_id(self):
- """Call ``GET /endpoints?region_id={region_id}``.
-
- Ensure passing the parent_region_id as filter returns an
- empty list.
-
- """
- parent_region = self._create_region_with_parent_id()
- parent_region_id = parent_region.result['region']['id']
- self._create_random_endpoint(parent_region_id=parent_region_id)
-
- response = self.get('/endpoints?region_id=%s' % parent_region_id)
- self.assertEqual(0, len(response.json['endpoints']))
-
- def test_list_endpoints_with_multiple_filters(self):
- """Call ``GET /endpoints?interface={interface}...``.
-
- Ensure passing different combinations of interface, region_id and
- service_id as filters will return the correct result.
-
- """
- # interface and region_id specified
- ref = self._create_random_endpoint(interface='internal')
- response = self.get('/endpoints?interface=%s&region_id=%s' %
- (ref['interface'], ref['region_id']))
- self.assertValidEndpointListResponse(response, ref=ref)
-
- for endpoint in response.json['endpoints']:
- self.assertEqual(ref['interface'], endpoint['interface'])
- self.assertEqual(ref['region_id'], endpoint['region_id'])
-
- # interface and service_id specified
- ref = self._create_random_endpoint(interface='internal')
- response = self.get('/endpoints?interface=%s&service_id=%s' %
- (ref['interface'], ref['service_id']))
- self.assertValidEndpointListResponse(response, ref=ref)
-
- for endpoint in response.json['endpoints']:
- self.assertEqual(ref['interface'], endpoint['interface'])
- self.assertEqual(ref['service_id'], endpoint['service_id'])
-
- # region_id and service_id specified
- ref = self._create_random_endpoint(interface='internal')
- response = self.get('/endpoints?region_id=%s&service_id=%s' %
- (ref['region_id'], ref['service_id']))
- self.assertValidEndpointListResponse(response, ref=ref)
-
- for endpoint in response.json['endpoints']:
- self.assertEqual(ref['region_id'], endpoint['region_id'])
- self.assertEqual(ref['service_id'], endpoint['service_id'])
-
- # interface, region_id and service_id specified
- ref = self._create_random_endpoint(interface='internal')
- response = self.get(('/endpoints?interface=%s&region_id=%s'
- '&service_id=%s') %
- (ref['interface'], ref['region_id'],
- ref['service_id']))
- self.assertValidEndpointListResponse(response, ref=ref)
-
- for endpoint in response.json['endpoints']:
- self.assertEqual(ref['interface'], endpoint['interface'])
- self.assertEqual(ref['region_id'], endpoint['region_id'])
- self.assertEqual(ref['service_id'], endpoint['service_id'])
-
- def test_list_endpoints_with_random_filter_values(self):
- """Call ``GET /endpoints?interface={interface}...``.
-
- Ensure passing random values for: interface, region_id and
- service_id will return an empty list.
-
- """
- self._create_random_endpoint(interface='internal')
-
- response = self.get('/endpoints?interface=%s' % uuid.uuid4().hex)
- self.assertEqual(0, len(response.json['endpoints']))
-
- response = self.get('/endpoints?region_id=%s' % uuid.uuid4().hex)
- self.assertEqual(0, len(response.json['endpoints']))
-
- response = self.get('/endpoints?service_id=%s' % uuid.uuid4().hex)
- self.assertEqual(0, len(response.json['endpoints']))
-
- def test_create_endpoint_no_enabled(self):
- """Call ``POST /endpoints``."""
- ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id)
- r = self.post('/endpoints', body={'endpoint': ref})
- ref['enabled'] = True
- self.assertValidEndpointResponse(r, ref)
-
- def test_create_endpoint_enabled_true(self):
- """Call ``POST /endpoints`` with enabled: true."""
- ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id,
- enabled=True)
- r = self.post('/endpoints', body={'endpoint': ref})
- self.assertValidEndpointResponse(r, ref)
-
- def test_create_endpoint_enabled_false(self):
- """Call ``POST /endpoints`` with enabled: false."""
- ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id,
- enabled=False)
- r = self.post('/endpoints', body={'endpoint': ref})
- self.assertValidEndpointResponse(r, ref)
-
- def test_create_endpoint_enabled_str_true(self):
- """Call ``POST /endpoints`` with enabled: 'True'."""
- ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id,
- enabled='True')
- self.post('/endpoints', body={'endpoint': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_endpoint_enabled_str_false(self):
- """Call ``POST /endpoints`` with enabled: 'False'."""
- ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id,
- enabled='False')
- self.post('/endpoints', body={'endpoint': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_endpoint_enabled_str_random(self):
- """Call ``POST /endpoints`` with enabled: 'puppies'."""
- ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id,
- enabled='puppies')
- self.post('/endpoints', body={'endpoint': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_endpoint_with_invalid_region_id(self):
- """Call ``POST /endpoints``."""
- ref = unit.new_endpoint_ref(service_id=self.service_id)
- self.post('/endpoints', body={'endpoint': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_endpoint_with_region(self):
- """EndpointV3 creates the region before creating the endpoint.
-
- This occurs when endpoint is provided with 'region' and no 'region_id'.
- """
- ref = unit.new_endpoint_ref_with_region(service_id=self.service_id,
- region=uuid.uuid4().hex)
- self.post('/endpoints', body={'endpoint': ref})
- # Make sure the region is created
- self.get('/regions/%(region_id)s' % {'region_id': ref["region"]})
-
- def test_create_endpoint_with_no_region(self):
- """EndpointV3 allows creating the endpoint without a region."""
- ref = unit.new_endpoint_ref(service_id=self.service_id, region_id=None)
- del ref['region_id'] # cannot just be None, it needs to not exist
- self.post('/endpoints', body={'endpoint': ref})
-
- def test_create_endpoint_with_empty_url(self):
- """Call ``POST /endpoints``."""
- ref = unit.new_endpoint_ref(service_id=self.service_id, url='')
- self.post('/endpoints', body={'endpoint': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_get_endpoint(self):
- """Call ``GET /endpoints/{endpoint_id}``."""
- r = self.get(
- '/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id})
- self.assertValidEndpointResponse(r, self.endpoint)
-
- def test_update_endpoint(self):
- """Call ``PATCH /endpoints/{endpoint_id}``."""
- ref = unit.new_endpoint_ref(service_id=self.service_id,
- interface='public',
- region_id=self.region_id)
- del ref['id']
- r = self.patch(
- '/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id},
- body={'endpoint': ref})
- ref['enabled'] = True
- self.assertValidEndpointResponse(r, ref)
-
- def test_update_endpoint_enabled_true(self):
- """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: True."""
- r = self.patch(
- '/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id},
- body={'endpoint': {'enabled': True}})
- self.assertValidEndpointResponse(r, self.endpoint)
-
- def test_update_endpoint_enabled_false(self):
- """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: False."""
- r = self.patch(
- '/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id},
- body={'endpoint': {'enabled': False}})
- exp_endpoint = copy.copy(self.endpoint)
- exp_endpoint['enabled'] = False
- self.assertValidEndpointResponse(r, exp_endpoint)
-
- def test_update_endpoint_enabled_str_true(self):
- """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'True'."""
- self.patch(
- '/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id},
- body={'endpoint': {'enabled': 'True'}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_update_endpoint_enabled_str_false(self):
- """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'False'."""
- self.patch(
- '/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id},
- body={'endpoint': {'enabled': 'False'}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_update_endpoint_enabled_str_random(self):
- """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'kitties'."""
- self.patch(
- '/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id},
- body={'endpoint': {'enabled': 'kitties'}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_delete_endpoint(self):
- """Call ``DELETE /endpoints/{endpoint_id}``."""
- self.delete(
- '/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id})
-
- def test_create_endpoint_on_v2(self):
- # clear the v3 endpoint so we only have endpoints created on v2
- self.delete(
- '/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint_id})
-
- # create a v3 endpoint ref, and then tweak it back to a v2-style ref
- ref = unit.new_endpoint_ref_with_region(service_id=self.service['id'],
- region=uuid.uuid4().hex,
- internalurl=None)
- del ref['id']
- del ref['interface']
- ref['publicurl'] = ref.pop('url')
- # don't set adminurl to ensure its absence is handled like internalurl
-
- # create the endpoint on v2 (using a v3 token)
- r = self.admin_request(
- method='POST',
- path='/v2.0/endpoints',
- token=self.get_scoped_token(),
- body={'endpoint': ref})
- endpoint_v2 = r.result['endpoint']
-
- # test the endpoint on v3
- r = self.get('/endpoints')
- endpoints = self.assertValidEndpointListResponse(r)
- self.assertEqual(1, len(endpoints))
- endpoint_v3 = endpoints.pop()
-
- # these attributes are identical between both APIs
- self.assertEqual(ref['region'], endpoint_v3['region_id'])
- self.assertEqual(ref['service_id'], endpoint_v3['service_id'])
- self.assertEqual(ref['description'], endpoint_v3['description'])
-
- # a v2 endpoint is not quite the same concept as a v3 endpoint, so they
- # receive different identifiers
- self.assertNotEqual(endpoint_v2['id'], endpoint_v3['id'])
-
- # v2 has a publicurl; v3 has a url + interface type
- self.assertEqual(ref['publicurl'], endpoint_v3['url'])
- self.assertEqual('public', endpoint_v3['interface'])
-
- # tests for bug 1152632 -- these attributes were being returned by v3
- self.assertNotIn('publicurl', endpoint_v3)
- self.assertNotIn('adminurl', endpoint_v3)
- self.assertNotIn('internalurl', endpoint_v3)
-
- # test for bug 1152635 -- this attribute was being returned by v3
- self.assertNotIn('legacy_endpoint_id', endpoint_v3)
-
- self.assertEqual(endpoint_v2['region'], endpoint_v3['region_id'])
-
- def test_deleting_endpoint_with_space_in_url(self):
- # add a space to all urls (intentional "i d" to test bug)
- url_with_space = "http://127.0.0.1:8774 /v1.1/\$(tenant_i d)s"
-
- # create a v3 endpoint ref
- ref = unit.new_endpoint_ref(service_id=self.service['id'],
- region_id=None,
- publicurl=url_with_space,
- internalurl=url_with_space,
- adminurl=url_with_space,
- url=url_with_space)
-
- # add the endpoint to the database
- self.catalog_api.create_endpoint(ref['id'], ref)
-
- # delete the endpoint
- self.delete('/endpoints/%s' % ref['id'])
-
- # make sure it's deleted (GET should return Not Found)
- self.get('/endpoints/%s' % ref['id'],
- expected_status=http_client.NOT_FOUND)
-
- def test_endpoint_create_with_valid_url(self):
- """Creating an endpoint with a valid url should be tested, too."""
- # list one valid url is enough, no need to list too much
- valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
-
- ref = unit.new_endpoint_ref(self.service_id,
- interface='public',
- region_id=self.region_id,
- url=valid_url)
- self.post('/endpoints', body={'endpoint': ref})
-
- def test_endpoint_create_with_valid_url_project_id(self):
- """Creating an endpoint with a valid url should be tested, too."""
- valid_url = 'http://127.0.0.1:8774/v1.1/$(project_id)s'
-
- ref = unit.new_endpoint_ref(self.service_id,
- interface='public',
- region_id=self.region_id,
- url=valid_url)
- self.post('/endpoints', body={'endpoint': ref})
-
- def test_endpoint_create_with_invalid_url(self):
- """Test the invalid cases: the substitution is not exactly right."""
- invalid_urls = [
- # using a substitution that is not whitelisted - KeyError
- 'http://127.0.0.1:8774/v1.1/$(nonexistent)s',
-
- # invalid formatting - ValueError
- 'http://127.0.0.1:8774/v1.1/$(tenant_id)',
- 'http://127.0.0.1:8774/v1.1/$(tenant_id)t',
- 'http://127.0.0.1:8774/v1.1/$(tenant_id',
-
- # invalid type specifier - TypeError
- # admin_url is a string not an int
- 'http://127.0.0.1:8774/v1.1/$(admin_url)d',
- ]
-
- ref = unit.new_endpoint_ref(self.service_id)
-
- for invalid_url in invalid_urls:
- ref['url'] = invalid_url
- self.post('/endpoints',
- body={'endpoint': ref},
- expected_status=http_client.BAD_REQUEST)
-
-
-class TestCatalogAPISQL(unit.TestCase):
- """Tests for the catalog Manager against the SQL backend."""
-
- def setUp(self):
- super(TestCatalogAPISQL, self).setUp()
- self.useFixture(database.Database())
- self.catalog_api = catalog.Manager()
-
- service = unit.new_service_ref()
- self.service_id = service['id']
- self.catalog_api.create_service(self.service_id, service)
-
- self.create_endpoint(service_id=self.service_id)
-
- def create_endpoint(self, service_id, **kwargs):
- endpoint = unit.new_endpoint_ref(service_id=service_id,
- region_id=None, **kwargs)
-
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
- return endpoint
-
- def config_overrides(self):
- super(TestCatalogAPISQL, self).config_overrides()
- self.config_fixture.config(group='catalog', driver='sql')
-
- def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
- user_id = uuid.uuid4().hex
- tenant_id = uuid.uuid4().hex
-
- # the only endpoint in the catalog is the one created in setUp
- catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
- self.assertEqual(1, len(catalog[0]['endpoints']))
- # it's also the only endpoint in the backend
- self.assertEqual(1, len(self.catalog_api.list_endpoints()))
-
- # create a new, invalid endpoint - malformed type declaration
- self.create_endpoint(self.service_id,
- url='http://keystone/%(tenant_id)')
-
- # create a new, invalid endpoint - nonexistent key
- self.create_endpoint(self.service_id,
- url='http://keystone/%(you_wont_find_me)s')
-
- # verify that the invalid endpoints don't appear in the catalog
- catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
- self.assertEqual(1, len(catalog[0]['endpoints']))
- # all three appear in the backend
- self.assertEqual(3, len(self.catalog_api.list_endpoints()))
-
- # create another valid endpoint - tenant_id will be replaced
- self.create_endpoint(self.service_id,
- url='http://keystone/%(tenant_id)s')
-
- # there are two valid endpoints, positive check
- catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
- self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
-
- # If the URL has no 'tenant_id' to substitute, we will skip the
- # endpoint which contains this kind of URL, negative check.
- tenant_id = None
- catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
- self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
-
- def test_get_catalog_always_returns_service_name(self):
- user_id = uuid.uuid4().hex
- tenant_id = uuid.uuid4().hex
-
- # create a service, with a name
- named_svc = unit.new_service_ref()
- self.catalog_api.create_service(named_svc['id'], named_svc)
- self.create_endpoint(service_id=named_svc['id'])
-
- # create a service, with no name
- unnamed_svc = unit.new_service_ref(name=None)
- del unnamed_svc['name']
- self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc)
- self.create_endpoint(service_id=unnamed_svc['id'])
-
- catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
-
- named_endpoint = [ep for ep in catalog
- if ep['type'] == named_svc['type']][0]
- self.assertEqual(named_svc['name'], named_endpoint['name'])
-
- unnamed_endpoint = [ep for ep in catalog
- if ep['type'] == unnamed_svc['type']][0]
- self.assertEqual('', unnamed_endpoint['name'])
-
-
-# TODO(dstanek): this needs refactoring with the test above, but we are in a
-# crunch so that will happen in a future patch.
-class TestCatalogAPISQLRegions(unit.TestCase):
- """Tests for the catalog Manager against the SQL backend."""
-
- def setUp(self):
- super(TestCatalogAPISQLRegions, self).setUp()
- self.useFixture(database.Database())
- self.catalog_api = catalog.Manager()
-
- def config_overrides(self):
- super(TestCatalogAPISQLRegions, self).config_overrides()
- self.config_fixture.config(group='catalog', driver='sql')
-
- def test_get_catalog_returns_proper_endpoints_with_no_region(self):
- service = unit.new_service_ref()
- service_id = service['id']
- self.catalog_api.create_service(service_id, service)
-
- endpoint = unit.new_endpoint_ref(service_id=service_id,
- region_id=None)
- del endpoint['region_id']
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
-
- user_id = uuid.uuid4().hex
- tenant_id = uuid.uuid4().hex
-
- catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
- self.assertValidCatalogEndpoint(
- catalog[0]['endpoints'][0], ref=endpoint)
-
- def test_get_catalog_returns_proper_endpoints_with_region(self):
- service = unit.new_service_ref()
- service_id = service['id']
- self.catalog_api.create_service(service_id, service)
-
- endpoint = unit.new_endpoint_ref(service_id=service_id)
- region = unit.new_region_ref(id=endpoint['region_id'])
- self.catalog_api.create_region(region)
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
-
- endpoint = self.catalog_api.get_endpoint(endpoint['id'])
- user_id = uuid.uuid4().hex
- tenant_id = uuid.uuid4().hex
-
- catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
- self.assertValidCatalogEndpoint(
- catalog[0]['endpoints'][0], ref=endpoint)
-
- def assertValidCatalogEndpoint(self, entity, ref=None):
- keys = ['description', 'id', 'interface', 'name', 'region_id', 'url']
- for k in keys:
- self.assertEqual(ref.get(k), entity[k], k)
- self.assertEqual(entity['region_id'], entity['region'])
diff --git a/keystone-moon/keystone/tests/unit/test_v3_controller.py b/keystone-moon/keystone/tests/unit/test_v3_controller.py
deleted file mode 100644
index 563e656e..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_controller.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2014 CERN.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-import six
-from six.moves import range
-from testtools import matchers
-
-from keystone.common import controller
-from keystone import exception
-from keystone.tests import unit
-
-
-class V3ControllerTestCase(unit.TestCase):
- """Tests for the V3Controller class."""
- def setUp(self):
- super(V3ControllerTestCase, self).setUp()
-
- class ControllerUnderTest(controller.V3Controller):
- _mutable_parameters = frozenset(['hello', 'world'])
-
- self.api = ControllerUnderTest()
-
- def test_check_immutable_params(self):
- """Pass valid parameters to the method and expect no failure."""
- ref = {
- 'hello': uuid.uuid4().hex,
- 'world': uuid.uuid4().hex
- }
- self.api.check_immutable_params(ref)
-
- def test_check_immutable_params_fail(self):
- """Pass invalid parameter to the method and expect failure."""
- ref = {uuid.uuid4().hex: uuid.uuid4().hex for _ in range(3)}
-
- ex = self.assertRaises(exception.ImmutableAttributeError,
- self.api.check_immutable_params, ref)
- ex_msg = six.text_type(ex)
- self.assertThat(ex_msg, matchers.Contains(self.api.__class__.__name__))
- for key in ref.keys():
- self.assertThat(ex_msg, matchers.Contains(key))
diff --git a/keystone-moon/keystone/tests/unit/test_v3_credential.py b/keystone-moon/keystone/tests/unit/test_v3_credential.py
deleted file mode 100644
index 07995f19..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_credential.py
+++ /dev/null
@@ -1,478 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import hashlib
-import json
-import uuid
-
-from keystoneclient.contrib.ec2 import utils as ec2_utils
-from oslo_config import cfg
-from six.moves import http_client
-from testtools import matchers
-
-from keystone.common import utils
-from keystone.contrib.ec2 import controllers
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-
-
-CONF = cfg.CONF
-CRED_TYPE_EC2 = controllers.CRED_TYPE_EC2
-
-
-class CredentialBaseTestCase(test_v3.RestfulTestCase):
- def _create_dict_blob_credential(self):
- blob, credential = unit.new_ec2_credential(user_id=self.user['id'],
- project_id=self.project_id)
-
- # Store the blob as a dict *not* JSON ref bug #1259584
- # This means we can test the dict->json workaround, added
- # as part of the bugfix for backwards compatibility works.
- credential['blob'] = blob
- credential_id = credential['id']
-
- # Create direct via the DB API to avoid validation failure
- self.credential_api.create_credential(credential_id, credential)
-
- return json.dumps(blob), credential_id
-
-
-class CredentialTestCase(CredentialBaseTestCase):
- """Test credential CRUD."""
-
- def setUp(self):
-
- super(CredentialTestCase, self).setUp()
-
- self.credential = unit.new_credential_ref(user_id=self.user['id'],
- project_id=self.project_id)
-
- self.credential_api.create_credential(
- self.credential['id'],
- self.credential)
-
- def test_credential_api_delete_credentials_for_project(self):
- self.credential_api.delete_credentials_for_project(self.project_id)
- # Test that the credential that we created in .setUp no longer exists
- # once we delete all credentials for self.project_id
- self.assertRaises(exception.CredentialNotFound,
- self.credential_api.get_credential,
- credential_id=self.credential['id'])
-
- def test_credential_api_delete_credentials_for_user(self):
- self.credential_api.delete_credentials_for_user(self.user_id)
- # Test that the credential that we created in .setUp no longer exists
- # once we delete all credentials for self.user_id
- self.assertRaises(exception.CredentialNotFound,
- self.credential_api.get_credential,
- credential_id=self.credential['id'])
-
- def test_list_credentials(self):
- """Call ``GET /credentials``."""
- r = self.get('/credentials')
- self.assertValidCredentialListResponse(r, ref=self.credential)
-
- def test_list_credentials_filtered_by_user_id(self):
- """Call ``GET /credentials?user_id={user_id}``."""
- credential = unit.new_credential_ref(user_id=uuid.uuid4().hex)
- self.credential_api.create_credential(credential['id'], credential)
-
- r = self.get('/credentials?user_id=%s' % self.user['id'])
- self.assertValidCredentialListResponse(r, ref=self.credential)
- for cred in r.result['credentials']:
- self.assertEqual(self.user['id'], cred['user_id'])
-
- def test_list_credentials_filtered_by_type(self):
- """Call ``GET /credentials?type={type}``."""
- # The type ec2 was chosen, instead of a random string,
- # because the type must be in the list of supported types
- ec2_credential = unit.new_credential_ref(user_id=uuid.uuid4().hex,
- project_id=self.project_id,
- type=CRED_TYPE_EC2)
-
- ec2_resp = self.credential_api.create_credential(
- ec2_credential['id'], ec2_credential)
-
- # The type cert was chosen for the same reason as ec2
- r = self.get('/credentials?type=cert')
-
- # Testing the filter for two different types
- self.assertValidCredentialListResponse(r, ref=self.credential)
- for cred in r.result['credentials']:
- self.assertEqual('cert', cred['type'])
-
- r_ec2 = self.get('/credentials?type=ec2')
- self.assertThat(r_ec2.result['credentials'], matchers.HasLength(1))
- cred_ec2 = r_ec2.result['credentials'][0]
-
- self.assertValidCredentialListResponse(r_ec2, ref=ec2_resp)
- self.assertEqual(CRED_TYPE_EC2, cred_ec2['type'])
- self.assertEqual(ec2_credential['id'], cred_ec2['id'])
-
- def test_list_credentials_filtered_by_type_and_user_id(self):
- """Call ``GET /credentials?user_id={user_id}&type={type}``."""
- user1_id = uuid.uuid4().hex
- user2_id = uuid.uuid4().hex
-
- # Creating credentials for two different users
- credential_user1_ec2 = unit.new_credential_ref(user_id=user1_id,
- type=CRED_TYPE_EC2)
- credential_user1_cert = unit.new_credential_ref(user_id=user1_id)
- credential_user2_cert = unit.new_credential_ref(user_id=user2_id)
-
- self.credential_api.create_credential(
- credential_user1_ec2['id'], credential_user1_ec2)
- self.credential_api.create_credential(
- credential_user1_cert['id'], credential_user1_cert)
- self.credential_api.create_credential(
- credential_user2_cert['id'], credential_user2_cert)
-
- r = self.get('/credentials?user_id=%s&type=ec2' % user1_id)
- self.assertValidCredentialListResponse(r, ref=credential_user1_ec2)
- self.assertThat(r.result['credentials'], matchers.HasLength(1))
- cred = r.result['credentials'][0]
- self.assertEqual(CRED_TYPE_EC2, cred['type'])
- self.assertEqual(user1_id, cred['user_id'])
-
- def test_create_credential(self):
- """Call ``POST /credentials``."""
- ref = unit.new_credential_ref(user_id=self.user['id'])
- r = self.post(
- '/credentials',
- body={'credential': ref})
- self.assertValidCredentialResponse(r, ref)
-
- def test_get_credential(self):
- """Call ``GET /credentials/{credential_id}``."""
- r = self.get(
- '/credentials/%(credential_id)s' % {
- 'credential_id': self.credential['id']})
- self.assertValidCredentialResponse(r, self.credential)
-
- def test_update_credential(self):
- """Call ``PATCH /credentials/{credential_id}``."""
- ref = unit.new_credential_ref(user_id=self.user['id'],
- project_id=self.project_id)
- del ref['id']
- r = self.patch(
- '/credentials/%(credential_id)s' % {
- 'credential_id': self.credential['id']},
- body={'credential': ref})
- self.assertValidCredentialResponse(r, ref)
-
- def test_delete_credential(self):
- """Call ``DELETE /credentials/{credential_id}``."""
- self.delete(
- '/credentials/%(credential_id)s' % {
- 'credential_id': self.credential['id']})
-
- def test_create_ec2_credential(self):
- """Call ``POST /credentials`` for creating ec2 credential."""
- blob, ref = unit.new_ec2_credential(user_id=self.user['id'],
- project_id=self.project_id)
- r = self.post('/credentials', body={'credential': ref})
- self.assertValidCredentialResponse(r, ref)
- # Assert credential id is same as hash of access key id for
- # ec2 credentials
- access = blob['access'].encode('utf-8')
- self.assertEqual(hashlib.sha256(access).hexdigest(),
- r.result['credential']['id'])
- # Create second ec2 credential with the same access key id and check
- # for conflict.
- self.post(
- '/credentials',
- body={'credential': ref}, expected_status=http_client.CONFLICT)
-
- def test_get_ec2_dict_blob(self):
- """Ensure non-JSON blob data is correctly converted."""
- expected_blob, credential_id = self._create_dict_blob_credential()
-
- r = self.get(
- '/credentials/%(credential_id)s' % {
- 'credential_id': credential_id})
-
- # use json.loads to transform the blobs back into Python dictionaries
- # to avoid problems with the keys being in different orders.
- self.assertEqual(json.loads(expected_blob),
- json.loads(r.result['credential']['blob']))
-
- def test_list_ec2_dict_blob(self):
- """Ensure non-JSON blob data is correctly converted."""
- expected_blob, credential_id = self._create_dict_blob_credential()
-
- list_r = self.get('/credentials')
- list_creds = list_r.result['credentials']
- list_ids = [r['id'] for r in list_creds]
- self.assertIn(credential_id, list_ids)
- # use json.loads to transform the blobs back into Python dictionaries
- # to avoid problems with the keys being in different orders.
- for r in list_creds:
- if r['id'] == credential_id:
- self.assertEqual(json.loads(expected_blob),
- json.loads(r['blob']))
-
- def test_create_non_ec2_credential(self):
- """Test creating non-ec2 credential.
-
- Call ``POST /credentials``.
- """
- blob, ref = unit.new_cert_credential(user_id=self.user['id'])
-
- r = self.post('/credentials', body={'credential': ref})
- self.assertValidCredentialResponse(r, ref)
- # Assert credential id is not same as hash of access key id for
- # non-ec2 credentials
- access = blob['access'].encode('utf-8')
- self.assertNotEqual(hashlib.sha256(access).hexdigest(),
- r.result['credential']['id'])
-
- def test_create_ec2_credential_with_missing_project_id(self):
- """Test Creating ec2 credential with missing project_id.
-
- Call ``POST /credentials``.
- """
- _, ref = unit.new_ec2_credential(user_id=self.user['id'],
- project_id=None)
- # Assert bad request status when missing project_id
- self.post(
- '/credentials',
- body={'credential': ref}, expected_status=http_client.BAD_REQUEST)
-
- def test_create_ec2_credential_with_invalid_blob(self):
- """Test creating ec2 credential with invalid blob.
-
- Call ``POST /credentials``.
- """
- ref = unit.new_credential_ref(user_id=self.user['id'],
- project_id=self.project_id,
- blob='{"abc":"def"d}',
- type=CRED_TYPE_EC2)
- # Assert bad request status when request contains invalid blob
- response = self.post(
- '/credentials',
- body={'credential': ref}, expected_status=http_client.BAD_REQUEST)
- self.assertValidErrorResponse(response)
-
- def test_create_credential_with_admin_token(self):
- # Make sure we can create credential with the static admin token
- ref = unit.new_credential_ref(user_id=self.user['id'])
- r = self.post(
- '/credentials',
- body={'credential': ref},
- token=self.get_admin_token())
- self.assertValidCredentialResponse(r, ref)
-
-
-class TestCredentialTrustScoped(test_v3.RestfulTestCase):
- """Test credential with trust scoped token."""
-
- def setUp(self):
- super(TestCredentialTrustScoped, self).setUp()
-
- self.trustee_user = unit.new_user_ref(domain_id=self.domain_id)
- password = self.trustee_user['password']
- self.trustee_user = self.identity_api.create_user(self.trustee_user)
- self.trustee_user['password'] = password
- self.trustee_user_id = self.trustee_user['id']
-
- def config_overrides(self):
- super(TestCredentialTrustScoped, self).config_overrides()
- self.config_fixture.config(group='trust', enabled=True)
-
- def test_trust_scoped_ec2_credential(self):
- """Test creating trust scoped ec2 credential.
-
- Call ``POST /credentials``.
- """
- # Create the trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- del ref['id']
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- # Get a trust scoped token
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidProjectScopedTokenResponse(r, self.user)
- trust_id = r.result['token']['OS-TRUST:trust']['id']
- token_id = r.headers.get('X-Subject-Token')
-
- # Create the credential with the trust scoped token
- blob, ref = unit.new_ec2_credential(user_id=self.user['id'],
- project_id=self.project_id)
- r = self.post('/credentials', body={'credential': ref}, token=token_id)
-
- # We expect the response blob to contain the trust_id
- ret_ref = ref.copy()
- ret_blob = blob.copy()
- ret_blob['trust_id'] = trust_id
- ret_ref['blob'] = json.dumps(ret_blob)
- self.assertValidCredentialResponse(r, ref=ret_ref)
-
- # Assert credential id is same as hash of access key id for
- # ec2 credentials
- access = blob['access'].encode('utf-8')
- self.assertEqual(hashlib.sha256(access).hexdigest(),
- r.result['credential']['id'])
-
- # Create second ec2 credential with the same access key id and check
- # for conflict.
- self.post(
- '/credentials',
- body={'credential': ref},
- token=token_id,
- expected_status=http_client.CONFLICT)
-
-
-class TestCredentialEc2(CredentialBaseTestCase):
- """Test v3 credential compatibility with ec2tokens."""
-
- def setUp(self):
- super(TestCredentialEc2, self).setUp()
-
- def _validate_signature(self, access, secret):
- """Test signature validation with the access/secret provided."""
- signer = ec2_utils.Ec2Signer(secret)
- params = {'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': '2',
- 'AWSAccessKeyId': access}
- request = {'host': 'foo',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- signature = signer.generate(request)
-
- # Now make a request to validate the signed dummy request via the
- # ec2tokens API. This proves the v3 ec2 credentials actually work.
- sig_ref = {'access': access,
- 'signature': signature,
- 'host': 'foo',
- 'verb': 'GET',
- 'path': '/bar',
- 'params': params}
- r = self.post(
- '/ec2tokens',
- body={'ec2Credentials': sig_ref},
- expected_status=http_client.OK)
- self.assertValidTokenResponse(r)
-
- def test_ec2_credential_signature_validate(self):
- """Test signature validation with a v3 ec2 credential."""
- blob, ref = unit.new_ec2_credential(user_id=self.user['id'],
- project_id=self.project_id)
- r = self.post('/credentials', body={'credential': ref})
- self.assertValidCredentialResponse(r, ref)
- # Assert credential id is same as hash of access key id
- access = blob['access'].encode('utf-8')
- self.assertEqual(hashlib.sha256(access).hexdigest(),
- r.result['credential']['id'])
-
- cred_blob = json.loads(r.result['credential']['blob'])
- self.assertEqual(blob, cred_blob)
- self._validate_signature(access=cred_blob['access'],
- secret=cred_blob['secret'])
-
- def test_ec2_credential_signature_validate_legacy(self):
- """Test signature validation with a legacy v3 ec2 credential."""
- cred_json, _ = self._create_dict_blob_credential()
- cred_blob = json.loads(cred_json)
- self._validate_signature(access=cred_blob['access'],
- secret=cred_blob['secret'])
-
- def _get_ec2_cred_uri(self):
- return '/users/%s/credentials/OS-EC2' % self.user_id
-
- def _get_ec2_cred(self):
- uri = self._get_ec2_cred_uri()
- r = self.post(uri, body={'tenant_id': self.project_id})
- return r.result['credential']
-
- def test_ec2_create_credential(self):
- """Test ec2 credential creation."""
- ec2_cred = self._get_ec2_cred()
- self.assertEqual(self.user_id, ec2_cred['user_id'])
- self.assertEqual(self.project_id, ec2_cred['tenant_id'])
- self.assertIsNone(ec2_cred['trust_id'])
- self._validate_signature(access=ec2_cred['access'],
- secret=ec2_cred['secret'])
- uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']])
- self.assertThat(ec2_cred['links']['self'],
- matchers.EndsWith(uri))
-
- def test_ec2_get_credential(self):
- ec2_cred = self._get_ec2_cred()
- uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']])
- r = self.get(uri)
- self.assertDictEqual(ec2_cred, r.result['credential'])
- self.assertThat(ec2_cred['links']['self'],
- matchers.EndsWith(uri))
-
- def test_ec2_cannot_get_non_ec2_credential(self):
- access_key = uuid.uuid4().hex
- cred_id = utils.hash_access_key(access_key)
- non_ec2_cred = unit.new_credential_ref(
- user_id=self.user_id,
- project_id=self.project_id)
- non_ec2_cred['id'] = cred_id
- self.credential_api.create_credential(cred_id, non_ec2_cred)
- uri = '/'.join([self._get_ec2_cred_uri(), access_key])
- # if access_key is not found, ec2 controller raises Unauthorized
- # exception
- self.get(uri, expected_status=http_client.UNAUTHORIZED)
-
- def test_ec2_list_credentials(self):
- """Test ec2 credential listing."""
- self._get_ec2_cred()
- uri = self._get_ec2_cred_uri()
- r = self.get(uri)
- cred_list = r.result['credentials']
- self.assertEqual(1, len(cred_list))
- self.assertThat(r.result['links']['self'],
- matchers.EndsWith(uri))
-
- # non-EC2 credentials won't be fetched
- non_ec2_cred = unit.new_credential_ref(
- user_id=self.user_id,
- project_id=self.project_id)
- non_ec2_cred['type'] = uuid.uuid4().hex
- self.credential_api.create_credential(non_ec2_cred['id'],
- non_ec2_cred)
- r = self.get(uri)
- cred_list_2 = r.result['credentials']
- # still one element because non-EC2 credentials are not returned.
- self.assertEqual(1, len(cred_list_2))
- self.assertEqual(cred_list[0], cred_list_2[0])
-
- def test_ec2_delete_credential(self):
- """Test ec2 credential deletion."""
- ec2_cred = self._get_ec2_cred()
- uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']])
- cred_from_credential_api = (
- self.credential_api
- .list_credentials_for_user(self.user_id, type=CRED_TYPE_EC2))
- self.assertEqual(1, len(cred_from_credential_api))
- self.delete(uri)
- self.assertRaises(exception.CredentialNotFound,
- self.credential_api.get_credential,
- cred_from_credential_api[0]['id'])
diff --git a/keystone-moon/keystone/tests/unit/test_v3_domain_config.py b/keystone-moon/keystone/tests/unit/test_v3_domain_config.py
deleted file mode 100644
index ee716081..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_domain_config.py
+++ /dev/null
@@ -1,459 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-from oslo_config import cfg
-from six.moves import http_client
-
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-
-
-CONF = cfg.CONF
-
-
-class DomainConfigTestCase(test_v3.RestfulTestCase):
- """Test domain config support."""
-
- def setUp(self):
- super(DomainConfigTestCase, self).setUp()
-
- self.domain = unit.new_domain_ref()
- self.resource_api.create_domain(self.domain['id'], self.domain)
- self.config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_tree_dn': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
-
- def test_create_config(self):
- """Call ``PUT /domains/{domain_id}/config``."""
- url = '/domains/%(domain_id)s/config' % {
- 'domain_id': self.domain['id']}
- r = self.put(url, body={'config': self.config},
- expected_status=http_client.CREATED)
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertEqual(self.config, r.result['config'])
- self.assertEqual(self.config, res)
-
- def test_create_config_invalid_domain(self):
- """Call ``PUT /domains/{domain_id}/config``
-
- While creating Identity API-based domain config with an invalid domain
- id provided, the request shall be rejected with a response, 404 domain
- not found.
- """
- invalid_domain_id = uuid.uuid4().hex
- url = '/domains/%(domain_id)s/config' % {
- 'domain_id': invalid_domain_id}
- self.put(url, body={'config': self.config},
- expected_status=exception.DomainNotFound.code)
-
- def test_create_config_twice(self):
- """Check multiple creates don't throw error"""
- self.put('/domains/%(domain_id)s/config' % {
- 'domain_id': self.domain['id']},
- body={'config': self.config},
- expected_status=http_client.CREATED)
- self.put('/domains/%(domain_id)s/config' % {
- 'domain_id': self.domain['id']},
- body={'config': self.config},
- expected_status=http_client.OK)
-
- def test_delete_config(self):
- """Call ``DELETE /domains{domain_id}/config``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
- self.delete('/domains/%(domain_id)s/config' % {
- 'domain_id': self.domain['id']})
- self.get('/domains/%(domain_id)s/config' % {
- 'domain_id': self.domain['id']},
- expected_status=exception.DomainConfigNotFound.code)
-
- def test_delete_config_invalid_domain(self):
- """Call ``DELETE /domains{domain_id}/config``
-
- While deleting Identity API-based domain config with an invalid domain
- id provided, the request shall be rejected with a response, 404 domain
- not found.
- """
- self.domain_config_api.create_config(self.domain['id'], self.config)
- invalid_domain_id = uuid.uuid4().hex
- self.delete('/domains/%(domain_id)s/config' % {
- 'domain_id': invalid_domain_id},
- expected_status=exception.DomainNotFound.code)
-
- def test_delete_config_by_group(self):
- """Call ``DELETE /domains{domain_id}/config/{group}``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
- self.delete('/domains/%(domain_id)s/config/ldap' % {
- 'domain_id': self.domain['id']})
- res = self.domain_config_api.get_config(self.domain['id'])
- self.assertNotIn('ldap', res)
-
- def test_delete_config_by_group_invalid_domain(self):
- """Call ``DELETE /domains{domain_id}/config/{group}``
-
- While deleting Identity API-based domain config by group with an
- invalid domain id provided, the request shall be rejected with a
- response 404 domain not found.
- """
- self.domain_config_api.create_config(self.domain['id'], self.config)
- invalid_domain_id = uuid.uuid4().hex
- self.delete('/domains/%(domain_id)s/config/ldap' % {
- 'domain_id': invalid_domain_id},
- expected_status=exception.DomainNotFound.code)
-
- def test_get_head_config(self):
- """Call ``GET & HEAD for /domains{domain_id}/config``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
- url = '/domains/%(domain_id)s/config' % {
- 'domain_id': self.domain['id']}
- r = self.get(url)
- self.assertEqual(self.config, r.result['config'])
- self.head(url, expected_status=http_client.OK)
-
- def test_get_config_by_group(self):
- """Call ``GET & HEAD /domains{domain_id}/config/{group}``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
- url = '/domains/%(domain_id)s/config/ldap' % {
- 'domain_id': self.domain['id']}
- r = self.get(url)
- self.assertEqual({'ldap': self.config['ldap']}, r.result['config'])
- self.head(url, expected_status=http_client.OK)
-
- def test_get_config_by_group_invalid_domain(self):
- """Call ``GET & HEAD /domains{domain_id}/config/{group}``
-
- While retrieving Identity API-based domain config by group with an
- invalid domain id provided, the request shall be rejected with a
- response 404 domain not found.
- """
- self.domain_config_api.create_config(self.domain['id'], self.config)
- invalid_domain_id = uuid.uuid4().hex
- self.get('/domains/%(domain_id)s/config/ldap' % {
- 'domain_id': invalid_domain_id},
- expected_status=exception.DomainNotFound.code)
-
- def test_get_config_by_option(self):
- """Call ``GET & HEAD /domains{domain_id}/config/{group}/{option}``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
- url = '/domains/%(domain_id)s/config/ldap/url' % {
- 'domain_id': self.domain['id']}
- r = self.get(url)
- self.assertEqual({'url': self.config['ldap']['url']},
- r.result['config'])
- self.head(url, expected_status=http_client.OK)
-
- def test_get_config_by_option_invalid_domain(self):
- """Call ``GET & HEAD /domains{domain_id}/config/{group}/{option}``
-
- While retrieving Identity API-based domain config by option with an
- invalid domain id provided, the request shall be rejected with a
- response 404 domain not found.
- """
- self.domain_config_api.create_config(self.domain['id'], self.config)
- invalid_domain_id = uuid.uuid4().hex
- self.get('/domains/%(domain_id)s/config/ldap/url' % {
- 'domain_id': invalid_domain_id},
- expected_status=exception.DomainNotFound.code)
-
- def test_get_non_existant_config(self):
- """Call ``GET /domains{domain_id}/config when no config defined``."""
- self.get('/domains/%(domain_id)s/config' % {
- 'domain_id': self.domain['id']},
- expected_status=http_client.NOT_FOUND)
-
- def test_get_non_existant_config_invalid_domain(self):
- """Call ``GET /domains{domain_id}/config when no config defined``
-
- While retrieving non-existent Identity API-based domain config with an
- invalid domain id provided, the request shall be rejected with a
- response 404 domain not found.
- """
- invalid_domain_id = uuid.uuid4().hex
- self.get('/domains/%(domain_id)s/config' % {
- 'domain_id': invalid_domain_id},
- expected_status=exception.DomainNotFound.code)
-
- def test_get_non_existant_config_group(self):
- """Call ``GET /domains{domain_id}/config/{group_not_exist}``."""
- config = {'ldap': {'url': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- self.get('/domains/%(domain_id)s/config/identity' % {
- 'domain_id': self.domain['id']},
- expected_status=http_client.NOT_FOUND)
-
- def test_get_non_existant_config_group_invalid_domain(self):
- """Call ``GET /domains{domain_id}/config/{group_not_exist}``
-
- While retrieving non-existent Identity API-based domain config group
- with an invalid domain id provided, the request shall be rejected with
- a response, 404 domain not found.
- """
- config = {'ldap': {'url': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- invalid_domain_id = uuid.uuid4().hex
- self.get('/domains/%(domain_id)s/config/identity' % {
- 'domain_id': invalid_domain_id},
- expected_status=exception.DomainNotFound.code)
-
- def test_get_non_existant_config_option(self):
- """Call ``GET /domains{domain_id}/config/group/{option_not_exist}``."""
- config = {'ldap': {'url': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- self.get('/domains/%(domain_id)s/config/ldap/user_tree_dn' % {
- 'domain_id': self.domain['id']},
- expected_status=http_client.NOT_FOUND)
-
- def test_get_non_existant_config_option_invalid_domain(self):
- """Call ``GET /domains{domain_id}/config/group/{option_not_exist}``
-
- While retrieving non-existent Identity API-based domain config option
- with an invalid domain id provided, the request shall be rejected with
- a response, 404 domain not found.
- """
- config = {'ldap': {'url': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- invalid_domain_id = uuid.uuid4().hex
- self.get('/domains/%(domain_id)s/config/ldap/user_tree_dn' % {
- 'domain_id': invalid_domain_id},
- expected_status=exception.DomainNotFound.code)
-
- def test_update_config(self):
- """Call ``PATCH /domains/{domain_id}/config``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
- new_config = {'ldap': {'url': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- r = self.patch('/domains/%(domain_id)s/config' % {
- 'domain_id': self.domain['id']},
- body={'config': new_config})
- res = self.domain_config_api.get_config(self.domain['id'])
- expected_config = copy.deepcopy(self.config)
- expected_config['ldap']['url'] = new_config['ldap']['url']
- expected_config['identity']['driver'] = (
- new_config['identity']['driver'])
- self.assertEqual(expected_config, r.result['config'])
- self.assertEqual(expected_config, res)
-
- def test_update_config_invalid_domain(self):
- """Call ``PATCH /domains/{domain_id}/config``
-
- While updating Identity API-based domain config with an invalid domain
- id provided, the request shall be rejected with a response, 404 domain
- not found.
- """
- self.domain_config_api.create_config(self.domain['id'], self.config)
- new_config = {'ldap': {'url': uuid.uuid4().hex},
- 'identity': {'driver': uuid.uuid4().hex}}
- invalid_domain_id = uuid.uuid4().hex
- self.patch('/domains/%(domain_id)s/config' % {
- 'domain_id': invalid_domain_id},
- body={'config': new_config},
- expected_status=exception.DomainNotFound.code)
-
- def test_update_config_group(self):
- """Call ``PATCH /domains/{domain_id}/config/{group}``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
- new_config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_filter': uuid.uuid4().hex}}
- r = self.patch('/domains/%(domain_id)s/config/ldap' % {
- 'domain_id': self.domain['id']},
- body={'config': new_config})
- res = self.domain_config_api.get_config(self.domain['id'])
- expected_config = copy.deepcopy(self.config)
- expected_config['ldap']['url'] = new_config['ldap']['url']
- expected_config['ldap']['user_filter'] = (
- new_config['ldap']['user_filter'])
- self.assertEqual(expected_config, r.result['config'])
- self.assertEqual(expected_config, res)
-
- def test_update_config_group_invalid_domain(self):
- """Call ``PATCH /domains/{domain_id}/config/{group}``
-
- While updating Identity API-based domain config group with an invalid
- domain id provided, the request shall be rejected with a response,
- 404 domain not found.
- """
- self.domain_config_api.create_config(self.domain['id'], self.config)
- new_config = {'ldap': {'url': uuid.uuid4().hex,
- 'user_filter': uuid.uuid4().hex}}
- invalid_domain_id = uuid.uuid4().hex
- self.patch('/domains/%(domain_id)s/config/ldap' % {
- 'domain_id': invalid_domain_id},
- body={'config': new_config},
- expected_status=exception.DomainNotFound.code)
-
- def test_update_config_invalid_group(self):
- """Call ``PATCH /domains/{domain_id}/config/{invalid_group}``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
-
- # Trying to update a group that is neither whitelisted or sensitive
- # should result in Forbidden.
- invalid_group = uuid.uuid4().hex
- new_config = {invalid_group: {'url': uuid.uuid4().hex,
- 'user_filter': uuid.uuid4().hex}}
- self.patch('/domains/%(domain_id)s/config/%(invalid_group)s' % {
- 'domain_id': self.domain['id'], 'invalid_group': invalid_group},
- body={'config': new_config},
- expected_status=http_client.FORBIDDEN)
- # Trying to update a valid group, but one that is not in the current
- # config should result in NotFound
- config = {'ldap': {'suffix': uuid.uuid4().hex}}
- self.domain_config_api.create_config(self.domain['id'], config)
- new_config = {'identity': {'driver': uuid.uuid4().hex}}
- self.patch('/domains/%(domain_id)s/config/identity' % {
- 'domain_id': self.domain['id']},
- body={'config': new_config},
- expected_status=http_client.NOT_FOUND)
-
- def test_update_config_invalid_group_invalid_domain(self):
- """Call ``PATCH /domains/{domain_id}/config/{invalid_group}``
-
- While updating Identity API-based domain config with an invalid group
- and an invalid domain id provided, the request shall be rejected
- with a response, 404 domain not found.
- """
- self.domain_config_api.create_config(self.domain['id'], self.config)
- invalid_group = uuid.uuid4().hex
- new_config = {invalid_group: {'url': uuid.uuid4().hex,
- 'user_filter': uuid.uuid4().hex}}
- invalid_domain_id = uuid.uuid4().hex
- self.patch('/domains/%(domain_id)s/config/%(invalid_group)s' % {
- 'domain_id': invalid_domain_id,
- 'invalid_group': invalid_group},
- body={'config': new_config},
- expected_status=exception.DomainNotFound.code)
-
- def test_update_config_option(self):
- """Call ``PATCH /domains/{domain_id}/config/{group}/{option}``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
- new_config = {'url': uuid.uuid4().hex}
- r = self.patch('/domains/%(domain_id)s/config/ldap/url' % {
- 'domain_id': self.domain['id']},
- body={'config': new_config})
- res = self.domain_config_api.get_config(self.domain['id'])
- expected_config = copy.deepcopy(self.config)
- expected_config['ldap']['url'] = new_config['url']
- self.assertEqual(expected_config, r.result['config'])
- self.assertEqual(expected_config, res)
-
- def test_update_config_option_invalid_domain(self):
- """Call ``PATCH /domains/{domain_id}/config/{group}/{option}``
-
- While updating Identity API-based domain config option with an invalid
- domain id provided, the request shall be rejected with a response, 404
- domain not found.
- """
- self.domain_config_api.create_config(self.domain['id'], self.config)
- new_config = {'url': uuid.uuid4().hex}
- invalid_domain_id = uuid.uuid4().hex
- self.patch('/domains/%(domain_id)s/config/ldap/url' % {
- 'domain_id': invalid_domain_id},
- body={'config': new_config},
- expected_status=exception.DomainNotFound.code)
-
- def test_update_config_invalid_option(self):
- """Call ``PATCH /domains/{domain_id}/config/{group}/{invalid}``."""
- self.domain_config_api.create_config(self.domain['id'], self.config)
- invalid_option = uuid.uuid4().hex
- new_config = {'ldap': {invalid_option: uuid.uuid4().hex}}
- # Trying to update an option that is neither whitelisted or sensitive
- # should result in Forbidden.
- self.patch(
- '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' % {
- 'domain_id': self.domain['id'],
- 'invalid_option': invalid_option},
- body={'config': new_config},
- expected_status=http_client.FORBIDDEN)
- # Trying to update a valid option, but one that is not in the current
- # config should result in NotFound
- new_config = {'suffix': uuid.uuid4().hex}
- self.patch(
- '/domains/%(domain_id)s/config/ldap/suffix' % {
- 'domain_id': self.domain['id']},
- body={'config': new_config},
- expected_status=http_client.NOT_FOUND)
-
- def test_update_config_invalid_option_invalid_domain(self):
- """Call ``PATCH /domains/{domain_id}/config/{group}/{invalid}``
-
- While updating Identity API-based domain config with an invalid option
- and an invalid domain id provided, the request shall be rejected
- with a response, 404 domain not found.
- """
- self.domain_config_api.create_config(self.domain['id'], self.config)
- invalid_option = uuid.uuid4().hex
- new_config = {'ldap': {invalid_option: uuid.uuid4().hex}}
- invalid_domain_id = uuid.uuid4().hex
- self.patch(
- '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' % {
- 'domain_id': invalid_domain_id,
- 'invalid_option': invalid_option},
- body={'config': new_config},
- expected_status=exception.DomainNotFound.code)
-
- def test_get_config_default(self):
- """Call ``GET /domains/config/default``."""
- # Create a config that overrides a few of the options so that we can
- # check that only the defaults are returned.
- self.domain_config_api.create_config(self.domain['id'], self.config)
- url = '/domains/config/default'
- r = self.get(url)
- default_config = r.result['config']
- for group in default_config:
- for option in default_config[group]:
- self.assertEqual(getattr(getattr(CONF, group), option),
- default_config[group][option])
-
- def test_get_config_default_by_group(self):
- """Call ``GET /domains/config/{group}/default``."""
- # Create a config that overrides a few of the options so that we can
- # check that only the defaults are returned.
- self.domain_config_api.create_config(self.domain['id'], self.config)
- url = '/domains/config/ldap/default'
- r = self.get(url)
- default_config = r.result['config']
- for option in default_config['ldap']:
- self.assertEqual(getattr(CONF.ldap, option),
- default_config['ldap'][option])
-
- def test_get_config_default_by_option(self):
- """Call ``GET /domains/config/{group}/{option}/default``."""
- # Create a config that overrides a few of the options so that we can
- # check that only the defaults are returned.
- self.domain_config_api.create_config(self.domain['id'], self.config)
- url = '/domains/config/ldap/url/default'
- r = self.get(url)
- default_config = r.result['config']
- self.assertEqual(CONF.ldap.url, default_config['url'])
-
- def test_get_config_default_by_invalid_group(self):
- """Call ``GET for /domains/config/{bad-group}/default``."""
- # First try a valid group, but one we don't support for domain config
- self.get('/domains/config/resouce/default',
- expected_status=http_client.FORBIDDEN)
-
- # Now try a totally invalid group
- url = '/domains/config/%s/default' % uuid.uuid4().hex
- self.get(url, expected_status=http_client.FORBIDDEN)
-
- def test_get_config_default_by_invalid_option(self):
- """Call ``GET for /domains/config/{group}/{bad-option}/default``."""
- # First try a valid option, but one we don't support for domain config,
- # i.e. one that is in the sensitive options list
- self.get('/domains/config/ldap/password/default',
- expected_status=http_client.FORBIDDEN)
-
- # Now try a totally invalid option
- url = '/domains/config/ldap/%s/default' % uuid.uuid4().hex
- self.get(url, expected_status=http_client.FORBIDDEN)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
deleted file mode 100644
index 9fee8d2b..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# Copyright 2014 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from six.moves import http_client
-from testtools import matchers
-
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-
-
-class EndpointPolicyTestCase(test_v3.RestfulTestCase):
- """Test endpoint policy CRUD.
-
- In general, the controller layer of the endpoint policy extension is really
- just marshalling the data around the underlying manager calls. Given that
- the manager layer is tested in depth by the backend tests, the tests we
- execute here concentrate on ensuring we are correctly passing and
- presenting the data.
-
- """
-
- def setUp(self):
- super(EndpointPolicyTestCase, self).setUp()
- self.policy = unit.new_policy_ref()
- self.policy_api.create_policy(self.policy['id'], self.policy)
- self.service = unit.new_service_ref()
- self.catalog_api.create_service(self.service['id'], self.service)
- self.endpoint = unit.new_endpoint_ref(self.service['id'], enabled=True,
- interface='public',
- region_id=self.region_id)
- self.catalog_api.create_endpoint(self.endpoint['id'], self.endpoint)
- self.region = unit.new_region_ref()
- self.catalog_api.create_region(self.region)
-
- def assert_head_and_get_return_same_response(self, url, expected_status):
- self.get(url, expected_status=expected_status)
- self.head(url, expected_status=expected_status)
-
- # endpoint policy crud tests
- def _crud_test(self, url):
- # Test when the resource does not exist also ensures
- # that there is not a false negative after creation.
-
- self.assert_head_and_get_return_same_response(
- url,
- expected_status=http_client.NOT_FOUND)
-
- self.put(url)
-
- # test that the new resource is accessible.
- self.assert_head_and_get_return_same_response(
- url,
- expected_status=http_client.NO_CONTENT)
-
- self.delete(url)
-
- # test that the deleted resource is no longer accessible
- self.assert_head_and_get_return_same_response(
- url,
- expected_status=http_client.NOT_FOUND)
-
- def test_crud_for_policy_for_explicit_endpoint(self):
- """PUT, HEAD and DELETE for explicit endpoint policy."""
- url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/endpoints/%(endpoint_id)s') % {
- 'policy_id': self.policy['id'],
- 'endpoint_id': self.endpoint['id']}
- self._crud_test(url)
-
- def test_crud_for_policy_for_service(self):
- """PUT, HEAD and DELETE for service endpoint policy."""
- url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/services/%(service_id)s') % {
- 'policy_id': self.policy['id'],
- 'service_id': self.service['id']}
- self._crud_test(url)
-
- def test_crud_for_policy_for_region_and_service(self):
- """PUT, HEAD and DELETE for region and service endpoint policy."""
- url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/services/%(service_id)s/regions/%(region_id)s') % {
- 'policy_id': self.policy['id'],
- 'service_id': self.service['id'],
- 'region_id': self.region['id']}
- self._crud_test(url)
-
- def test_get_policy_for_endpoint(self):
- """GET /endpoints/{endpoint_id}/policy."""
- self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/endpoints/%(endpoint_id)s' % {
- 'policy_id': self.policy['id'],
- 'endpoint_id': self.endpoint['id']})
-
- self.head('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY'
- '/policy' % {
- 'endpoint_id': self.endpoint['id']},
- expected_status=http_client.OK)
-
- r = self.get('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY'
- '/policy' % {
- 'endpoint_id': self.endpoint['id']})
- self.assertValidPolicyResponse(r, ref=self.policy)
-
- def test_list_endpoints_for_policy(self):
- """GET /policies/%(policy_id}/endpoints."""
- self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/endpoints/%(endpoint_id)s' % {
- 'policy_id': self.policy['id'],
- 'endpoint_id': self.endpoint['id']})
-
- r = self.get('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/endpoints' % {
- 'policy_id': self.policy['id']})
- self.assertValidEndpointListResponse(r, ref=self.endpoint)
- self.assertThat(r.result.get('endpoints'), matchers.HasLength(1))
-
- def test_endpoint_association_cleanup_when_endpoint_deleted(self):
- url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/endpoints/%(endpoint_id)s') % {
- 'policy_id': self.policy['id'],
- 'endpoint_id': self.endpoint['id']}
-
- self.put(url)
- self.head(url)
-
- self.delete('/endpoints/%(endpoint_id)s' % {
- 'endpoint_id': self.endpoint['id']})
-
- self.head(url, expected_status=http_client.NOT_FOUND)
-
- def test_region_service_association_cleanup_when_region_deleted(self):
- url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/services/%(service_id)s/regions/%(region_id)s') % {
- 'policy_id': self.policy['id'],
- 'service_id': self.service['id'],
- 'region_id': self.region['id']}
-
- self.put(url)
- self.head(url)
-
- self.delete('/regions/%(region_id)s' % {
- 'region_id': self.region['id']})
-
- self.head(url, expected_status=http_client.NOT_FOUND)
-
- def test_region_service_association_cleanup_when_service_deleted(self):
- url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/services/%(service_id)s/regions/%(region_id)s') % {
- 'policy_id': self.policy['id'],
- 'service_id': self.service['id'],
- 'region_id': self.region['id']}
-
- self.put(url)
- self.head(url)
-
- self.delete('/services/%(service_id)s' % {
- 'service_id': self.service['id']})
-
- self.head(url, expected_status=http_client.NOT_FOUND)
-
- def test_service_association_cleanup_when_service_deleted(self):
- url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/services/%(service_id)s') % {
- 'policy_id': self.policy['id'],
- 'service_id': self.service['id']}
-
- self.put(url)
- self.get(url, expected_status=http_client.NO_CONTENT)
-
- self.delete('/policies/%(policy_id)s' % {
- 'policy_id': self.policy['id']})
-
- self.head(url, expected_status=http_client.NOT_FOUND)
-
- def test_service_association_cleanup_when_policy_deleted(self):
- url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
- '/services/%(service_id)s') % {
- 'policy_id': self.policy['id'],
- 'service_id': self.service['id']}
-
- self.put(url)
- self.get(url, expected_status=http_client.NO_CONTENT)
-
- self.delete('/services/%(service_id)s' % {
- 'service_id': self.service['id']})
-
- self.head(url, expected_status=http_client.NOT_FOUND)
-
-
-class JsonHomeTests(test_v3.JsonHomeTestMixin):
- EXTENSION_LOCATION = ('http://docs.openstack.org/api/openstack-identity/3/'
- 'ext/OS-ENDPOINT-POLICY/1.0/rel')
- PARAM_LOCATION = 'http://docs.openstack.org/api/openstack-identity/3/param'
-
- JSON_HOME_DATA = {
- EXTENSION_LOCATION + '/endpoint_policy': {
- 'href-template': '/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/'
- 'policy',
- 'href-vars': {
- 'endpoint_id': PARAM_LOCATION + '/endpoint_id',
- },
- },
- EXTENSION_LOCATION + '/policy_endpoints': {
- 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
- 'endpoints',
- 'href-vars': {
- 'policy_id': PARAM_LOCATION + '/policy_id',
- },
- },
- EXTENSION_LOCATION + '/endpoint_policy_association': {
- 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
- 'endpoints/{endpoint_id}',
- 'href-vars': {
- 'policy_id': PARAM_LOCATION + '/policy_id',
- 'endpoint_id': PARAM_LOCATION + '/endpoint_id',
- },
- },
- EXTENSION_LOCATION + '/service_policy_association': {
- 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
- 'services/{service_id}',
- 'href-vars': {
- 'policy_id': PARAM_LOCATION + '/policy_id',
- 'service_id': PARAM_LOCATION + '/service_id',
- },
- },
- EXTENSION_LOCATION + '/region_and_service_policy_association': {
- 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/'
- 'services/{service_id}/regions/{region_id}',
- 'href-vars': {
- 'policy_id': PARAM_LOCATION + '/policy_id',
- 'service_id': PARAM_LOCATION + '/service_id',
- 'region_id': PARAM_LOCATION + '/region_id',
- },
- },
- }
diff --git a/keystone-moon/keystone/tests/unit/test_v3_federation.py b/keystone-moon/keystone/tests/unit/test_v3_federation.py
deleted file mode 100644
index f4ec8e51..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_federation.py
+++ /dev/null
@@ -1,3722 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import os
-import random
-from testtools import matchers
-import uuid
-
-import fixtures
-from lxml import etree
-import mock
-from oslo_config import cfg
-from oslo_log import versionutils
-from oslo_serialization import jsonutils
-from oslo_utils import importutils
-from oslotest import mockpatch
-import saml2
-from saml2 import saml
-from saml2 import sigver
-from six.moves import http_client
-from six.moves import range, urllib, zip
-xmldsig = importutils.try_import("saml2.xmldsig")
-if not xmldsig:
- xmldsig = importutils.try_import("xmldsig")
-
-from keystone.auth import controllers as auth_controllers
-from keystone.common import environment
-from keystone.contrib.federation import routers
-from keystone import exception
-from keystone.federation import controllers as federation_controllers
-from keystone.federation import idp as keystone_idp
-from keystone import notifications
-from keystone.tests import unit
-from keystone.tests.unit import core
-from keystone.tests.unit import federation_fixtures
-from keystone.tests.unit import ksfixtures
-from keystone.tests.unit import mapping_fixtures
-from keystone.tests.unit import test_v3
-from keystone.tests.unit import utils
-from keystone.token.providers import common as token_common
-
-
-subprocess = environment.subprocess
-
-CONF = cfg.CONF
-ROOTDIR = os.path.dirname(os.path.abspath(__file__))
-XMLDIR = os.path.join(ROOTDIR, 'saml2/')
-
-
-def dummy_validator(*args, **kwargs):
- pass
-
-
-class FederationTests(test_v3.RestfulTestCase):
-
- @mock.patch.object(versionutils, 'report_deprecated_feature')
- def test_exception_happens(self, mock_deprecator):
- routers.FederationExtension(mock.ANY)
- mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("Remove federation_extension from", args[1])
-
-
-class FederatedSetupMixin(object):
-
- ACTION = 'authenticate'
- IDP = 'ORG_IDP'
- PROTOCOL = 'saml2'
- AUTH_METHOD = 'saml2'
- USER = 'user@ORGANIZATION'
- ASSERTION_PREFIX = 'PREFIX_'
- IDP_WITH_REMOTE = 'ORG_IDP_REMOTE'
- REMOTE_IDS = ['entityID_IDP1', 'entityID_IDP2']
- REMOTE_ID_ATTR = uuid.uuid4().hex
-
- UNSCOPED_V3_SAML2_REQ = {
- "identity": {
- "methods": [AUTH_METHOD],
- AUTH_METHOD: {
- "identity_provider": IDP,
- "protocol": PROTOCOL
- }
- }
- }
-
- def _check_domains_are_valid(self, token):
- self.assertEqual('Federated', token['user']['domain']['id'])
- self.assertEqual('Federated', token['user']['domain']['name'])
-
- def _project(self, project):
- return (project['id'], project['name'])
-
- def _roles(self, roles):
- return set([(r['id'], r['name']) for r in roles])
-
- def _check_projects_and_roles(self, token, roles, projects):
- """Check whether the projects and the roles match."""
- token_roles = token.get('roles')
- if token_roles is None:
- raise AssertionError('Roles not found in the token')
- token_roles = self._roles(token_roles)
- roles_ref = self._roles(roles)
- self.assertEqual(token_roles, roles_ref)
-
- token_projects = token.get('project')
- if token_projects is None:
- raise AssertionError('Projects not found in the token')
- token_projects = self._project(token_projects)
- projects_ref = self._project(projects)
- self.assertEqual(token_projects, projects_ref)
-
- def _check_scoped_token_attributes(self, token):
-
- for obj in ('user', 'catalog', 'expires_at', 'issued_at',
- 'methods', 'roles'):
- self.assertIn(obj, token)
-
- os_federation = token['user']['OS-FEDERATION']
-
- self.assertIn('groups', os_federation)
- self.assertIn('identity_provider', os_federation)
- self.assertIn('protocol', os_federation)
- self.assertThat(os_federation, matchers.HasLength(3))
-
- self.assertEqual(self.IDP, os_federation['identity_provider']['id'])
- self.assertEqual(self.PROTOCOL, os_federation['protocol']['id'])
-
- def _check_project_scoped_token_attributes(self, token, project_id):
- self.assertEqual(project_id, token['project']['id'])
- self._check_scoped_token_attributes(token)
-
- def _check_domain_scoped_token_attributes(self, token, domain_id):
- self.assertEqual(domain_id, token['domain']['id'])
- self._check_scoped_token_attributes(token)
-
- def assertValidMappedUser(self, token):
- """Check if user object meets all the criteria."""
- user = token['user']
- self.assertIn('id', user)
- self.assertIn('name', user)
- self.assertIn('domain', user)
-
- self.assertIn('groups', user['OS-FEDERATION'])
- self.assertIn('identity_provider', user['OS-FEDERATION'])
- self.assertIn('protocol', user['OS-FEDERATION'])
-
- # Make sure user_id is url safe
- self.assertEqual(urllib.parse.quote(user['name']), user['id'])
-
- def _issue_unscoped_token(self,
- idp=None,
- assertion='EMPLOYEE_ASSERTION',
- environment=None):
- api = federation_controllers.Auth()
- context = {'environment': environment or {}}
- self._inject_assertion(context, assertion)
- if idp is None:
- idp = self.IDP
- r = api.federated_authentication(context, idp, self.PROTOCOL)
- return r
-
- def idp_ref(self, id=None):
- idp = {
- 'id': id or uuid.uuid4().hex,
- 'enabled': True,
- 'description': uuid.uuid4().hex
- }
- return idp
-
- def proto_ref(self, mapping_id=None):
- proto = {
- 'id': uuid.uuid4().hex,
- 'mapping_id': mapping_id or uuid.uuid4().hex
- }
- return proto
-
- def mapping_ref(self, rules=None):
- return {
- 'id': uuid.uuid4().hex,
- 'rules': rules or self.rules['rules']
- }
-
- def _scope_request(self, unscoped_token_id, scope, scope_id):
- return {
- 'auth': {
- 'identity': {
- 'methods': [
- self.AUTH_METHOD
- ],
- self.AUTH_METHOD: {
- 'id': unscoped_token_id
- }
- },
- 'scope': {
- scope: {
- 'id': scope_id
- }
- }
- }
- }
-
- def _inject_assertion(self, context, variant, query_string=None):
- assertion = getattr(mapping_fixtures, variant)
- context['environment'].update(assertion)
- context['query_string'] = query_string or []
-
- def load_federation_sample_data(self):
- """Inject additional data."""
- # Create and add domains
- self.domainA = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainA['id'],
- self.domainA)
-
- self.domainB = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainB['id'],
- self.domainB)
-
- self.domainC = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainC['id'],
- self.domainC)
-
- self.domainD = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainD['id'],
- self.domainD)
-
- # Create and add projects
- self.proj_employees = unit.new_project_ref(
- domain_id=self.domainA['id'])
- self.resource_api.create_project(self.proj_employees['id'],
- self.proj_employees)
- self.proj_customers = unit.new_project_ref(
- domain_id=self.domainA['id'])
- self.resource_api.create_project(self.proj_customers['id'],
- self.proj_customers)
-
- self.project_all = unit.new_project_ref(
- domain_id=self.domainA['id'])
- self.resource_api.create_project(self.project_all['id'],
- self.project_all)
-
- self.project_inherited = unit.new_project_ref(
- domain_id=self.domainD['id'])
- self.resource_api.create_project(self.project_inherited['id'],
- self.project_inherited)
-
- # Create and add groups
- self.group_employees = unit.new_group_ref(domain_id=self.domainA['id'])
- self.group_employees = (
- self.identity_api.create_group(self.group_employees))
-
- self.group_customers = unit.new_group_ref(domain_id=self.domainA['id'])
- self.group_customers = (
- self.identity_api.create_group(self.group_customers))
-
- self.group_admins = unit.new_group_ref(domain_id=self.domainA['id'])
- self.group_admins = self.identity_api.create_group(self.group_admins)
-
- # Create and add roles
- self.role_employee = unit.new_role_ref()
- self.role_api.create_role(self.role_employee['id'], self.role_employee)
- self.role_customer = unit.new_role_ref()
- self.role_api.create_role(self.role_customer['id'], self.role_customer)
-
- self.role_admin = unit.new_role_ref()
- self.role_api.create_role(self.role_admin['id'], self.role_admin)
-
- # Employees can access
- # * proj_employees
- # * project_all
- self.assignment_api.create_grant(self.role_employee['id'],
- group_id=self.group_employees['id'],
- project_id=self.proj_employees['id'])
- self.assignment_api.create_grant(self.role_employee['id'],
- group_id=self.group_employees['id'],
- project_id=self.project_all['id'])
- # Customers can access
- # * proj_customers
- self.assignment_api.create_grant(self.role_customer['id'],
- group_id=self.group_customers['id'],
- project_id=self.proj_customers['id'])
-
- # Admins can access:
- # * proj_customers
- # * proj_employees
- # * project_all
- self.assignment_api.create_grant(self.role_admin['id'],
- group_id=self.group_admins['id'],
- project_id=self.proj_customers['id'])
- self.assignment_api.create_grant(self.role_admin['id'],
- group_id=self.group_admins['id'],
- project_id=self.proj_employees['id'])
- self.assignment_api.create_grant(self.role_admin['id'],
- group_id=self.group_admins['id'],
- project_id=self.project_all['id'])
-
- self.assignment_api.create_grant(self.role_customer['id'],
- group_id=self.group_customers['id'],
- domain_id=self.domainA['id'])
-
- # Customers can access:
- # * domain A
- self.assignment_api.create_grant(self.role_customer['id'],
- group_id=self.group_customers['id'],
- domain_id=self.domainA['id'])
-
- # Customers can access projects via inheritance:
- # * domain D
- self.assignment_api.create_grant(self.role_customer['id'],
- group_id=self.group_customers['id'],
- domain_id=self.domainD['id'],
- inherited_to_projects=True)
-
- # Employees can access:
- # * domain A
- # * domain B
-
- self.assignment_api.create_grant(self.role_employee['id'],
- group_id=self.group_employees['id'],
- domain_id=self.domainA['id'])
- self.assignment_api.create_grant(self.role_employee['id'],
- group_id=self.group_employees['id'],
- domain_id=self.domainB['id'])
-
- # Admins can access:
- # * domain A
- # * domain B
- # * domain C
- self.assignment_api.create_grant(self.role_admin['id'],
- group_id=self.group_admins['id'],
- domain_id=self.domainA['id'])
- self.assignment_api.create_grant(self.role_admin['id'],
- group_id=self.group_admins['id'],
- domain_id=self.domainB['id'])
-
- self.assignment_api.create_grant(self.role_admin['id'],
- group_id=self.group_admins['id'],
- domain_id=self.domainC['id'])
- self.rules = {
- 'rules': [
- {
- 'local': [
- {
- 'group': {
- 'id': self.group_employees['id']
- }
- },
- {
- 'user': {
- 'name': '{0}',
- 'id': '{1}'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'UserName'
- },
- {
- 'type': 'Email',
- },
- {
- 'type': 'orgPersonType',
- 'any_one_of': [
- 'Employee'
- ]
- }
- ]
- },
- {
- 'local': [
- {
- 'group': {
- 'id': self.group_employees['id']
- }
- },
- {
- 'user': {
- 'name': '{0}',
- 'id': '{1}'
- }
- }
- ],
- 'remote': [
- {
- 'type': self.ASSERTION_PREFIX + 'UserName'
- },
- {
- 'type': self.ASSERTION_PREFIX + 'Email',
- },
- {
- 'type': self.ASSERTION_PREFIX + 'orgPersonType',
- 'any_one_of': [
- 'SuperEmployee'
- ]
- }
- ]
- },
- {
- 'local': [
- {
- 'group': {
- 'id': self.group_customers['id']
- }
- },
- {
- 'user': {
- 'name': '{0}',
- 'id': '{1}'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'UserName'
- },
- {
- 'type': 'Email'
- },
- {
- 'type': 'orgPersonType',
- 'any_one_of': [
- 'Customer'
- ]
- }
- ]
- },
- {
- 'local': [
- {
- 'group': {
- 'id': self.group_admins['id']
- }
- },
- {
- 'group': {
- 'id': self.group_employees['id']
- }
- },
- {
- 'group': {
- 'id': self.group_customers['id']
- }
- },
-
- {
- 'user': {
- 'name': '{0}',
- 'id': '{1}'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'UserName'
- },
- {
- 'type': 'Email'
- },
- {
- 'type': 'orgPersonType',
- 'any_one_of': [
- 'Admin',
- 'Chief'
- ]
- }
- ]
- },
- {
- 'local': [
- {
- 'group': {
- 'id': uuid.uuid4().hex
- }
- },
- {
- 'group': {
- 'id': self.group_customers['id']
- }
- },
- {
- 'user': {
- 'name': '{0}',
- 'id': '{1}'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'UserName',
- },
- {
- 'type': 'Email',
- },
- {
- 'type': 'FirstName',
- 'any_one_of': [
- 'Jill'
- ]
- },
- {
- 'type': 'LastName',
- 'any_one_of': [
- 'Smith'
- ]
- }
- ]
- },
- {
- 'local': [
- {
- 'group': {
- 'id': 'this_group_no_longer_exists'
- }
- },
- {
- 'user': {
- 'name': '{0}',
- 'id': '{1}'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'UserName',
- },
- {
- 'type': 'Email',
- },
- {
- 'type': 'Email',
- 'any_one_of': [
- 'testacct@example.com'
- ]
- },
- {
- 'type': 'orgPersonType',
- 'any_one_of': [
- 'Tester'
- ]
- }
- ]
- },
- # rules with local group names
- {
- "local": [
- {
- 'user': {
- 'name': '{0}',
- 'id': '{1}'
- }
- },
- {
- "group": {
- "name": self.group_customers['name'],
- "domain": {
- "name": self.domainA['name']
- }
- }
- }
- ],
- "remote": [
- {
- 'type': 'UserName',
- },
- {
- 'type': 'Email',
- },
- {
- "type": "orgPersonType",
- "any_one_of": [
- "CEO",
- "CTO"
- ],
- }
- ]
- },
- {
- "local": [
- {
- 'user': {
- 'name': '{0}',
- 'id': '{1}'
- }
- },
- {
- "group": {
- "name": self.group_admins['name'],
- "domain": {
- "id": self.domainA['id']
- }
- }
- }
- ],
- "remote": [
- {
- "type": "UserName",
- },
- {
- "type": "Email",
- },
- {
- "type": "orgPersonType",
- "any_one_of": [
- "Managers"
- ]
- }
- ]
- },
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "id": "{1}"
- }
- },
- {
- "group": {
- "name": "NON_EXISTING",
- "domain": {
- "id": self.domainA['id']
- }
- }
- }
- ],
- "remote": [
- {
- "type": "UserName",
- },
- {
- "type": "Email",
- },
- {
- "type": "UserName",
- "any_one_of": [
- "IamTester"
- ]
- }
- ]
- },
- {
- "local": [
- {
- "user": {
- "type": "local",
- "name": self.user['name'],
- "domain": {
- "id": self.user['domain_id']
- }
- }
- },
- {
- "group": {
- "id": self.group_customers['id']
- }
- }
- ],
- "remote": [
- {
- "type": "UserType",
- "any_one_of": [
- "random"
- ]
- }
- ]
- },
- {
- "local": [
- {
- "user": {
- "type": "local",
- "name": self.user['name'],
- "domain": {
- "id": uuid.uuid4().hex
- }
- }
- }
- ],
- "remote": [
- {
- "type": "Position",
- "any_one_of": [
- "DirectorGeneral"
- ]
- }
- ]
- }
- ]
- }
-
- # Add IDP
- self.idp = self.idp_ref(id=self.IDP)
- self.federation_api.create_idp(self.idp['id'],
- self.idp)
- # Add IDP with remote
- self.idp_with_remote = self.idp_ref(id=self.IDP_WITH_REMOTE)
- self.idp_with_remote['remote_ids'] = self.REMOTE_IDS
- self.federation_api.create_idp(self.idp_with_remote['id'],
- self.idp_with_remote)
- # Add a mapping
- self.mapping = self.mapping_ref()
- self.federation_api.create_mapping(self.mapping['id'],
- self.mapping)
- # Add protocols
- self.proto_saml = self.proto_ref(mapping_id=self.mapping['id'])
- self.proto_saml['id'] = self.PROTOCOL
- self.federation_api.create_protocol(self.idp['id'],
- self.proto_saml['id'],
- self.proto_saml)
- # Add protocols IDP with remote
- self.federation_api.create_protocol(self.idp_with_remote['id'],
- self.proto_saml['id'],
- self.proto_saml)
- # Generate fake tokens
- context = {'environment': {}}
-
- self.tokens = {}
- VARIANTS = ('EMPLOYEE_ASSERTION', 'CUSTOMER_ASSERTION',
- 'ADMIN_ASSERTION')
- api = auth_controllers.Auth()
- for variant in VARIANTS:
- self._inject_assertion(context, variant)
- r = api.authenticate_for_token(context, self.UNSCOPED_V3_SAML2_REQ)
- self.tokens[variant] = r.headers.get('X-Subject-Token')
-
- self.TOKEN_SCOPE_PROJECT_FROM_NONEXISTENT_TOKEN = self._scope_request(
- uuid.uuid4().hex, 'project', self.proj_customers['id'])
-
- self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE = self._scope_request(
- self.tokens['EMPLOYEE_ASSERTION'], 'project',
- self.proj_employees['id'])
-
- self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN = self._scope_request(
- self.tokens['ADMIN_ASSERTION'], 'project',
- self.proj_employees['id'])
-
- self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN = self._scope_request(
- self.tokens['ADMIN_ASSERTION'], 'project',
- self.proj_customers['id'])
-
- self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER = self._scope_request(
- self.tokens['CUSTOMER_ASSERTION'], 'project',
- self.proj_employees['id'])
-
- self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER = self._scope_request(
- self.tokens['CUSTOMER_ASSERTION'], 'project',
- self.project_inherited['id'])
-
- self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER = self._scope_request(
- self.tokens['CUSTOMER_ASSERTION'], 'domain', self.domainA['id'])
-
- self.TOKEN_SCOPE_DOMAIN_B_FROM_CUSTOMER = self._scope_request(
- self.tokens['CUSTOMER_ASSERTION'], 'domain',
- self.domainB['id'])
-
- self.TOKEN_SCOPE_DOMAIN_D_FROM_CUSTOMER = self._scope_request(
- self.tokens['CUSTOMER_ASSERTION'], 'domain', self.domainD['id'])
-
- self.TOKEN_SCOPE_DOMAIN_A_FROM_ADMIN = self._scope_request(
- self.tokens['ADMIN_ASSERTION'], 'domain', self.domainA['id'])
-
- self.TOKEN_SCOPE_DOMAIN_B_FROM_ADMIN = self._scope_request(
- self.tokens['ADMIN_ASSERTION'], 'domain', self.domainB['id'])
-
- self.TOKEN_SCOPE_DOMAIN_C_FROM_ADMIN = self._scope_request(
- self.tokens['ADMIN_ASSERTION'], 'domain',
- self.domainC['id'])
-
-
-class FederatedIdentityProviderTests(test_v3.RestfulTestCase):
- """A test class for Identity Providers."""
-
- idp_keys = ['description', 'enabled']
-
- default_body = {'description': None, 'enabled': True}
-
- def base_url(self, suffix=None):
- if suffix is not None:
- return '/OS-FEDERATION/identity_providers/' + str(suffix)
- return '/OS-FEDERATION/identity_providers'
-
- def _fetch_attribute_from_response(self, resp, parameter,
- assert_is_not_none=True):
- """Fetch single attribute from TestResponse object."""
- result = resp.result.get(parameter)
- if assert_is_not_none:
- self.assertIsNotNone(result)
- return result
-
- def _create_and_decapsulate_response(self, body=None):
- """Create IdP and fetch it's random id along with entity."""
- default_resp = self._create_default_idp(body=body)
- idp = self._fetch_attribute_from_response(default_resp,
- 'identity_provider')
- self.assertIsNotNone(idp)
- idp_id = idp.get('id')
- return (idp_id, idp)
-
- def _get_idp(self, idp_id):
- """Fetch IdP entity based on its id."""
- url = self.base_url(suffix=idp_id)
- resp = self.get(url)
- return resp
-
- def _create_default_idp(self, body=None):
- """Create default IdP."""
- url = self.base_url(suffix=uuid.uuid4().hex)
- if body is None:
- body = self._http_idp_input()
- resp = self.put(url, body={'identity_provider': body},
- expected_status=http_client.CREATED)
- return resp
-
- def _http_idp_input(self, **kwargs):
- """Create default input for IdP data."""
- body = None
- if 'body' not in kwargs:
- body = self.default_body.copy()
- body['description'] = uuid.uuid4().hex
- else:
- body = kwargs['body']
- return body
-
- def _assign_protocol_to_idp(self, idp_id=None, proto=None, url=None,
- mapping_id=None, validate=True, **kwargs):
- if url is None:
- url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
- if idp_id is None:
- idp_id, _ = self._create_and_decapsulate_response()
- if proto is None:
- proto = uuid.uuid4().hex
- if mapping_id is None:
- mapping_id = uuid.uuid4().hex
- body = {'mapping_id': mapping_id}
- url = url % {'idp_id': idp_id, 'protocol_id': proto}
- resp = self.put(url, body={'protocol': body}, **kwargs)
- if validate:
- self.assertValidResponse(resp, 'protocol', dummy_validator,
- keys_to_check=['id', 'mapping_id'],
- ref={'id': proto,
- 'mapping_id': mapping_id})
- return (resp, idp_id, proto)
-
- def _get_protocol(self, idp_id, protocol_id):
- url = "%s/protocols/%s" % (idp_id, protocol_id)
- url = self.base_url(suffix=url)
- r = self.get(url)
- return r
-
- def test_create_idp(self):
- """Creates the IdentityProvider entity associated to remote_ids."""
- keys_to_check = list(self.idp_keys)
- body = self.default_body.copy()
- body['description'] = uuid.uuid4().hex
- resp = self._create_default_idp(body=body)
- self.assertValidResponse(resp, 'identity_provider', dummy_validator,
- keys_to_check=keys_to_check,
- ref=body)
-
- def test_create_idp_remote(self):
- """Creates the IdentityProvider entity associated to remote_ids."""
- keys_to_check = list(self.idp_keys)
- keys_to_check.append('remote_ids')
- body = self.default_body.copy()
- body['description'] = uuid.uuid4().hex
- body['remote_ids'] = [uuid.uuid4().hex,
- uuid.uuid4().hex,
- uuid.uuid4().hex]
- resp = self._create_default_idp(body=body)
- self.assertValidResponse(resp, 'identity_provider', dummy_validator,
- keys_to_check=keys_to_check,
- ref=body)
-
- def test_create_idp_remote_repeated(self):
- """Creates two IdentityProvider entities with some remote_ids
-
- A remote_id is the same for both so the second IdP is not
- created because of the uniqueness of the remote_ids
-
- Expect HTTP 409 Conflict code for the latter call.
-
- """
- body = self.default_body.copy()
- repeated_remote_id = uuid.uuid4().hex
- body['remote_ids'] = [uuid.uuid4().hex,
- uuid.uuid4().hex,
- uuid.uuid4().hex,
- repeated_remote_id]
- self._create_default_idp(body=body)
-
- url = self.base_url(suffix=uuid.uuid4().hex)
- body['remote_ids'] = [uuid.uuid4().hex,
- repeated_remote_id]
- resp = self.put(url, body={'identity_provider': body},
- expected_status=http_client.CONFLICT)
-
- resp_data = jsonutils.loads(resp.body)
- self.assertIn('Duplicate remote ID',
- resp_data.get('error', {}).get('message'))
-
- def test_create_idp_remote_empty(self):
- """Creates an IdP with empty remote_ids."""
- keys_to_check = list(self.idp_keys)
- keys_to_check.append('remote_ids')
- body = self.default_body.copy()
- body['description'] = uuid.uuid4().hex
- body['remote_ids'] = []
- resp = self._create_default_idp(body=body)
- self.assertValidResponse(resp, 'identity_provider', dummy_validator,
- keys_to_check=keys_to_check,
- ref=body)
-
- def test_create_idp_remote_none(self):
- """Creates an IdP with a None remote_ids."""
- keys_to_check = list(self.idp_keys)
- keys_to_check.append('remote_ids')
- body = self.default_body.copy()
- body['description'] = uuid.uuid4().hex
- body['remote_ids'] = None
- resp = self._create_default_idp(body=body)
- expected = body.copy()
- expected['remote_ids'] = []
- self.assertValidResponse(resp, 'identity_provider', dummy_validator,
- keys_to_check=keys_to_check,
- ref=expected)
-
- def test_update_idp_remote_ids(self):
- """Update IdP's remote_ids parameter."""
- body = self.default_body.copy()
- body['remote_ids'] = [uuid.uuid4().hex]
- default_resp = self._create_default_idp(body=body)
- default_idp = self._fetch_attribute_from_response(default_resp,
- 'identity_provider')
- idp_id = default_idp.get('id')
- url = self.base_url(suffix=idp_id)
- self.assertIsNotNone(idp_id)
-
- body['remote_ids'] = [uuid.uuid4().hex, uuid.uuid4().hex]
-
- body = {'identity_provider': body}
- resp = self.patch(url, body=body)
- updated_idp = self._fetch_attribute_from_response(resp,
- 'identity_provider')
- body = body['identity_provider']
- self.assertEqual(sorted(body['remote_ids']),
- sorted(updated_idp.get('remote_ids')))
-
- resp = self.get(url)
- returned_idp = self._fetch_attribute_from_response(resp,
- 'identity_provider')
- self.assertEqual(sorted(body['remote_ids']),
- sorted(returned_idp.get('remote_ids')))
-
- def test_update_idp_clean_remote_ids(self):
- """Update IdP's remote_ids parameter with an empty list."""
- body = self.default_body.copy()
- body['remote_ids'] = [uuid.uuid4().hex]
- default_resp = self._create_default_idp(body=body)
- default_idp = self._fetch_attribute_from_response(default_resp,
- 'identity_provider')
- idp_id = default_idp.get('id')
- url = self.base_url(suffix=idp_id)
- self.assertIsNotNone(idp_id)
-
- body['remote_ids'] = []
-
- body = {'identity_provider': body}
- resp = self.patch(url, body=body)
- updated_idp = self._fetch_attribute_from_response(resp,
- 'identity_provider')
- body = body['identity_provider']
- self.assertEqual(sorted(body['remote_ids']),
- sorted(updated_idp.get('remote_ids')))
-
- resp = self.get(url)
- returned_idp = self._fetch_attribute_from_response(resp,
- 'identity_provider')
- self.assertEqual(sorted(body['remote_ids']),
- sorted(returned_idp.get('remote_ids')))
-
- def test_update_idp_remote_repeated(self):
- """Update an IdentityProvider entity reusing a remote_id.
-
- A remote_id is the same for both so the second IdP is not
- updated because of the uniqueness of the remote_ids.
-
- Expect HTTP 409 Conflict code for the latter call.
-
- """
- # Create first identity provider
- body = self.default_body.copy()
- repeated_remote_id = uuid.uuid4().hex
- body['remote_ids'] = [uuid.uuid4().hex,
- repeated_remote_id]
- self._create_default_idp(body=body)
-
- # Create second identity provider (without remote_ids)
- body = self.default_body.copy()
- default_resp = self._create_default_idp(body=body)
- default_idp = self._fetch_attribute_from_response(default_resp,
- 'identity_provider')
- idp_id = default_idp.get('id')
- url = self.base_url(suffix=idp_id)
-
- body['remote_ids'] = [repeated_remote_id]
- resp = self.patch(url, body={'identity_provider': body},
- expected_status=http_client.CONFLICT)
- resp_data = jsonutils.loads(resp.body)
- self.assertIn('Duplicate remote ID',
- resp_data['error']['message'])
-
- def test_list_idps(self, iterations=5):
- """Lists all available IdentityProviders.
-
- This test collects ids of created IdPs and
- intersects it with the list of all available IdPs.
- List of all IdPs can be a superset of IdPs created in this test,
- because other tests also create IdPs.
-
- """
- def get_id(resp):
- r = self._fetch_attribute_from_response(resp,
- 'identity_provider')
- return r.get('id')
-
- ids = []
- for _ in range(iterations):
- id = get_id(self._create_default_idp())
- ids.append(id)
- ids = set(ids)
-
- keys_to_check = self.idp_keys
- url = self.base_url()
- resp = self.get(url)
- self.assertValidListResponse(resp, 'identity_providers',
- dummy_validator,
- keys_to_check=keys_to_check)
- entities = self._fetch_attribute_from_response(resp,
- 'identity_providers')
- entities_ids = set([e['id'] for e in entities])
- ids_intersection = entities_ids.intersection(ids)
- self.assertEqual(ids_intersection, ids)
-
- def test_filter_list_idp_by_id(self):
- def get_id(resp):
- r = self._fetch_attribute_from_response(resp,
- 'identity_provider')
- return r.get('id')
-
- idp1_id = get_id(self._create_default_idp())
- idp2_id = get_id(self._create_default_idp())
-
- # list the IdP, should get two IdP.
- url = self.base_url()
- resp = self.get(url)
- entities = self._fetch_attribute_from_response(resp,
- 'identity_providers')
- entities_ids = [e['id'] for e in entities]
- self.assertItemsEqual(entities_ids, [idp1_id, idp2_id])
-
- # filter the IdP by ID.
- url = self.base_url() + '?id=' + idp1_id
- resp = self.get(url)
- filtered_service_list = resp.json['identity_providers']
- self.assertThat(filtered_service_list, matchers.HasLength(1))
- self.assertEqual(idp1_id, filtered_service_list[0].get('id'))
-
- def test_filter_list_idp_by_enabled(self):
- def get_id(resp):
- r = self._fetch_attribute_from_response(resp,
- 'identity_provider')
- return r.get('id')
-
- idp1_id = get_id(self._create_default_idp())
-
- body = self.default_body.copy()
- body['enabled'] = False
- idp2_id = get_id(self._create_default_idp(body=body))
-
- # list the IdP, should get two IdP.
- url = self.base_url()
- resp = self.get(url)
- entities = self._fetch_attribute_from_response(resp,
- 'identity_providers')
- entities_ids = [e['id'] for e in entities]
- self.assertItemsEqual(entities_ids, [idp1_id, idp2_id])
-
- # filter the IdP by 'enabled'.
- url = self.base_url() + '?enabled=True'
- resp = self.get(url)
- filtered_service_list = resp.json['identity_providers']
- self.assertThat(filtered_service_list, matchers.HasLength(1))
- self.assertEqual(idp1_id, filtered_service_list[0].get('id'))
-
- def test_check_idp_uniqueness(self):
- """Add same IdP twice.
-
- Expect HTTP 409 Conflict code for the latter call.
-
- """
- url = self.base_url(suffix=uuid.uuid4().hex)
- body = self._http_idp_input()
- self.put(url, body={'identity_provider': body},
- expected_status=http_client.CREATED)
- resp = self.put(url, body={'identity_provider': body},
- expected_status=http_client.CONFLICT)
-
- resp_data = jsonutils.loads(resp.body)
- self.assertIn('Duplicate entry',
- resp_data.get('error', {}).get('message'))
-
- def test_get_idp(self):
- """Create and later fetch IdP."""
- body = self._http_idp_input()
- default_resp = self._create_default_idp(body=body)
- default_idp = self._fetch_attribute_from_response(default_resp,
- 'identity_provider')
- idp_id = default_idp.get('id')
- url = self.base_url(suffix=idp_id)
- resp = self.get(url)
- self.assertValidResponse(resp, 'identity_provider',
- dummy_validator, keys_to_check=body.keys(),
- ref=body)
-
- def test_get_nonexisting_idp(self):
- """Fetch nonexisting IdP entity.
-
- Expected HTTP 404 Not Found status code.
-
- """
- idp_id = uuid.uuid4().hex
- self.assertIsNotNone(idp_id)
-
- url = self.base_url(suffix=idp_id)
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_delete_existing_idp(self):
- """Create and later delete IdP.
-
- Expect HTTP 404 Not Found for the GET IdP call.
- """
- default_resp = self._create_default_idp()
- default_idp = self._fetch_attribute_from_response(default_resp,
- 'identity_provider')
- idp_id = default_idp.get('id')
- self.assertIsNotNone(idp_id)
- url = self.base_url(suffix=idp_id)
- self.delete(url)
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_delete_idp_also_deletes_assigned_protocols(self):
- """Deleting an IdP will delete its assigned protocol."""
- # create default IdP
- default_resp = self._create_default_idp()
- default_idp = self._fetch_attribute_from_response(default_resp,
- 'identity_provider')
- idp_id = default_idp['id']
- protocol_id = uuid.uuid4().hex
-
- url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
- idp_url = self.base_url(suffix=idp_id)
-
- # assign protocol to IdP
- kwargs = {'expected_status': http_client.CREATED}
- resp, idp_id, proto = self._assign_protocol_to_idp(
- url=url,
- idp_id=idp_id,
- proto=protocol_id,
- **kwargs)
-
- # removing IdP will remove the assigned protocol as well
- self.assertEqual(1, len(self.federation_api.list_protocols(idp_id)))
- self.delete(idp_url)
- self.get(idp_url, expected_status=http_client.NOT_FOUND)
- self.assertEqual(0, len(self.federation_api.list_protocols(idp_id)))
-
- def test_delete_nonexisting_idp(self):
- """Delete nonexisting IdP.
-
- Expect HTTP 404 Not Found for the GET IdP call.
- """
- idp_id = uuid.uuid4().hex
- url = self.base_url(suffix=idp_id)
- self.delete(url, expected_status=http_client.NOT_FOUND)
-
- def test_update_idp_mutable_attributes(self):
- """Update IdP's mutable parameters."""
- default_resp = self._create_default_idp()
- default_idp = self._fetch_attribute_from_response(default_resp,
- 'identity_provider')
- idp_id = default_idp.get('id')
- url = self.base_url(suffix=idp_id)
- self.assertIsNotNone(idp_id)
-
- _enabled = not default_idp.get('enabled')
- body = {'remote_ids': [uuid.uuid4().hex, uuid.uuid4().hex],
- 'description': uuid.uuid4().hex,
- 'enabled': _enabled}
-
- body = {'identity_provider': body}
- resp = self.patch(url, body=body)
- updated_idp = self._fetch_attribute_from_response(resp,
- 'identity_provider')
- body = body['identity_provider']
- for key in body.keys():
- if isinstance(body[key], list):
- self.assertEqual(sorted(body[key]),
- sorted(updated_idp.get(key)))
- else:
- self.assertEqual(body[key], updated_idp.get(key))
-
- resp = self.get(url)
- updated_idp = self._fetch_attribute_from_response(resp,
- 'identity_provider')
- for key in body.keys():
- if isinstance(body[key], list):
- self.assertEqual(sorted(body[key]),
- sorted(updated_idp.get(key)))
- else:
- self.assertEqual(body[key], updated_idp.get(key))
-
- def test_update_idp_immutable_attributes(self):
- """Update IdP's immutable parameters.
-
- Expect HTTP BAD REQUEST.
-
- """
- default_resp = self._create_default_idp()
- default_idp = self._fetch_attribute_from_response(default_resp,
- 'identity_provider')
- idp_id = default_idp.get('id')
- self.assertIsNotNone(idp_id)
-
- body = self._http_idp_input()
- body['id'] = uuid.uuid4().hex
- body['protocols'] = [uuid.uuid4().hex, uuid.uuid4().hex]
-
- url = self.base_url(suffix=idp_id)
- self.patch(url, body={'identity_provider': body},
- expected_status=http_client.BAD_REQUEST)
-
- def test_update_nonexistent_idp(self):
- """Update nonexistent IdP
-
- Expect HTTP 404 Not Found code.
-
- """
- idp_id = uuid.uuid4().hex
- url = self.base_url(suffix=idp_id)
- body = self._http_idp_input()
- body['enabled'] = False
- body = {'identity_provider': body}
-
- self.patch(url, body=body, expected_status=http_client.NOT_FOUND)
-
- def test_assign_protocol_to_idp(self):
- """Assign a protocol to existing IdP."""
- self._assign_protocol_to_idp(expected_status=http_client.CREATED)
-
- def test_protocol_composite_pk(self):
- """Test that Keystone can add two entities.
-
- The entities have identical names, however, attached to different
- IdPs.
-
- 1. Add IdP and assign it protocol with predefined name
- 2. Add another IdP and assign it a protocol with same name.
-
- Expect HTTP 201 code
-
- """
- url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
-
- kwargs = {'expected_status': http_client.CREATED}
- self._assign_protocol_to_idp(proto='saml2',
- url=url, **kwargs)
-
- self._assign_protocol_to_idp(proto='saml2',
- url=url, **kwargs)
-
- def test_protocol_idp_pk_uniqueness(self):
- """Test whether Keystone checks for unique idp/protocol values.
-
- Add same protocol twice, expect Keystone to reject a latter call and
- return HTTP 409 Conflict code.
-
- """
- url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
-
- kwargs = {'expected_status': http_client.CREATED}
- resp, idp_id, proto = self._assign_protocol_to_idp(proto='saml2',
- url=url, **kwargs)
- kwargs = {'expected_status': http_client.CONFLICT}
- resp, idp_id, proto = self._assign_protocol_to_idp(idp_id=idp_id,
- proto='saml2',
- validate=False,
- url=url, **kwargs)
-
- def test_assign_protocol_to_nonexistent_idp(self):
- """Assign protocol to IdP that doesn't exist.
-
- Expect HTTP 404 Not Found code.
-
- """
- idp_id = uuid.uuid4().hex
- kwargs = {'expected_status': http_client.NOT_FOUND}
- self._assign_protocol_to_idp(proto='saml2',
- idp_id=idp_id,
- validate=False,
- **kwargs)
-
- def test_get_protocol(self):
- """Create and later fetch protocol tied to IdP."""
- resp, idp_id, proto = self._assign_protocol_to_idp(
- expected_status=http_client.CREATED)
- proto_id = self._fetch_attribute_from_response(resp, 'protocol')['id']
- url = "%s/protocols/%s" % (idp_id, proto_id)
- url = self.base_url(suffix=url)
-
- resp = self.get(url)
-
- reference = {'id': proto_id}
- self.assertValidResponse(resp, 'protocol',
- dummy_validator,
- keys_to_check=reference.keys(),
- ref=reference)
-
- def test_list_protocols(self):
- """Create set of protocols and later list them.
-
- Compare input and output id sets.
-
- """
- resp, idp_id, proto = self._assign_protocol_to_idp(
- expected_status=http_client.CREATED)
- iterations = random.randint(0, 16)
- protocol_ids = []
- for _ in range(iterations):
- resp, _, proto = self._assign_protocol_to_idp(
- idp_id=idp_id,
- expected_status=http_client.CREATED)
- proto_id = self._fetch_attribute_from_response(resp, 'protocol')
- proto_id = proto_id['id']
- protocol_ids.append(proto_id)
-
- url = "%s/protocols" % idp_id
- url = self.base_url(suffix=url)
- resp = self.get(url)
- self.assertValidListResponse(resp, 'protocols',
- dummy_validator,
- keys_to_check=['id'])
- entities = self._fetch_attribute_from_response(resp, 'protocols')
- entities = set([entity['id'] for entity in entities])
- protocols_intersection = entities.intersection(protocol_ids)
- self.assertEqual(protocols_intersection, set(protocol_ids))
-
- def test_update_protocols_attribute(self):
- """Update protocol's attribute."""
- resp, idp_id, proto = self._assign_protocol_to_idp(
- expected_status=http_client.CREATED)
- new_mapping_id = uuid.uuid4().hex
-
- url = "%s/protocols/%s" % (idp_id, proto)
- url = self.base_url(suffix=url)
- body = {'mapping_id': new_mapping_id}
- resp = self.patch(url, body={'protocol': body})
- self.assertValidResponse(resp, 'protocol', dummy_validator,
- keys_to_check=['id', 'mapping_id'],
- ref={'id': proto,
- 'mapping_id': new_mapping_id}
- )
-
- def test_delete_protocol(self):
- """Delete protocol.
-
- Expect HTTP 404 Not Found code for the GET call after the protocol is
- deleted.
-
- """
- url = self.base_url(suffix='/%(idp_id)s/'
- 'protocols/%(protocol_id)s')
- resp, idp_id, proto = self._assign_protocol_to_idp(
- expected_status=http_client.CREATED)
- url = url % {'idp_id': idp_id,
- 'protocol_id': proto}
- self.delete(url)
- self.get(url, expected_status=http_client.NOT_FOUND)
-
-
-class MappingCRUDTests(test_v3.RestfulTestCase):
- """A class for testing CRUD operations for Mappings."""
-
- MAPPING_URL = '/OS-FEDERATION/mappings/'
-
- def assertValidMappingListResponse(self, resp, *args, **kwargs):
- return self.assertValidListResponse(
- resp,
- 'mappings',
- self.assertValidMapping,
- keys_to_check=[],
- *args,
- **kwargs)
-
- def assertValidMappingResponse(self, resp, *args, **kwargs):
- return self.assertValidResponse(
- resp,
- 'mapping',
- self.assertValidMapping,
- keys_to_check=[],
- *args,
- **kwargs)
-
- def assertValidMapping(self, entity, ref=None):
- self.assertIsNotNone(entity.get('id'))
- self.assertIsNotNone(entity.get('rules'))
- if ref:
- self.assertEqual(entity['rules'], ref['rules'])
- return entity
-
- def _create_default_mapping_entry(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- resp = self.put(url,
- body={'mapping': mapping_fixtures.MAPPING_LARGE},
- expected_status=http_client.CREATED)
- return resp
-
- def _get_id_from_response(self, resp):
- r = resp.result.get('mapping')
- return r.get('id')
-
- def test_mapping_create(self):
- resp = self._create_default_mapping_entry()
- self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_LARGE)
-
- def test_mapping_list(self):
- url = self.MAPPING_URL
- self._create_default_mapping_entry()
- resp = self.get(url)
- entities = resp.result.get('mappings')
- self.assertIsNotNone(entities)
- self.assertResponseStatus(resp, http_client.OK)
- self.assertValidListLinks(resp.result.get('links'))
- self.assertEqual(1, len(entities))
-
- def test_mapping_delete(self):
- url = self.MAPPING_URL + '%(mapping_id)s'
- resp = self._create_default_mapping_entry()
- mapping_id = self._get_id_from_response(resp)
- url = url % {'mapping_id': str(mapping_id)}
- resp = self.delete(url)
- self.assertResponseStatus(resp, http_client.NO_CONTENT)
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_mapping_get(self):
- url = self.MAPPING_URL + '%(mapping_id)s'
- resp = self._create_default_mapping_entry()
- mapping_id = self._get_id_from_response(resp)
- url = url % {'mapping_id': mapping_id}
- resp = self.get(url)
- self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_LARGE)
-
- def test_mapping_update(self):
- url = self.MAPPING_URL + '%(mapping_id)s'
- resp = self._create_default_mapping_entry()
- mapping_id = self._get_id_from_response(resp)
- url = url % {'mapping_id': mapping_id}
- resp = self.patch(url,
- body={'mapping': mapping_fixtures.MAPPING_SMALL})
- self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_SMALL)
- resp = self.get(url)
- self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_SMALL)
-
- def test_delete_mapping_dne(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.delete(url, expected_status=http_client.NOT_FOUND)
-
- def test_get_mapping_dne(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_create_mapping_bad_requirements(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping_fixtures.MAPPING_BAD_REQ})
-
- def test_create_mapping_no_rules(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping_fixtures.MAPPING_NO_RULES})
-
- def test_create_mapping_no_remote_objects(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping_fixtures.MAPPING_NO_REMOTE})
-
- def test_create_mapping_bad_value(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping_fixtures.MAPPING_BAD_VALUE})
-
- def test_create_mapping_missing_local(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping_fixtures.MAPPING_MISSING_LOCAL})
-
- def test_create_mapping_missing_type(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping_fixtures.MAPPING_MISSING_TYPE})
-
- def test_create_mapping_wrong_type(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping_fixtures.MAPPING_WRONG_TYPE})
-
- def test_create_mapping_extra_remote_properties_not_any_of(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping})
-
- def test_create_mapping_extra_remote_properties_any_one_of(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping})
-
- def test_create_mapping_extra_remote_properties_just_type(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping})
-
- def test_create_mapping_empty_map(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': {}})
-
- def test_create_mapping_extra_rules_properties(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping_fixtures.MAPPING_EXTRA_RULES_PROPS})
-
- def test_create_mapping_with_blacklist_and_whitelist(self):
- """Test for adding whitelist and blacklist in the rule
-
- Server should respond with HTTP 400 Bad Request error upon discovering
- both ``whitelist`` and ``blacklist`` keywords in the same rule.
-
- """
- url = self.MAPPING_URL + uuid.uuid4().hex
- mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_AND_BLACKLIST
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': mapping})
-
- def test_create_mapping_with_local_user_and_local_domain(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- resp = self.put(
- url,
- body={
- 'mapping': mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN
- },
- expected_status=http_client.CREATED)
- self.assertValidMappingResponse(
- resp, mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN)
-
- def test_create_mapping_with_ephemeral(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- resp = self.put(
- url,
- body={'mapping': mapping_fixtures.MAPPING_EPHEMERAL_USER},
- expected_status=http_client.CREATED)
- self.assertValidMappingResponse(
- resp, mapping_fixtures.MAPPING_EPHEMERAL_USER)
-
- def test_create_mapping_with_bad_user_type(self):
- url = self.MAPPING_URL + uuid.uuid4().hex
- # get a copy of a known good map
- bad_mapping = copy.deepcopy(mapping_fixtures.MAPPING_EPHEMERAL_USER)
- # now sabotage the user type
- bad_mapping['rules'][0]['local'][0]['user']['type'] = uuid.uuid4().hex
- self.put(url, expected_status=http_client.BAD_REQUEST,
- body={'mapping': bad_mapping})
-
-
-class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin):
-
- def auth_plugin_config_override(self):
- methods = ['saml2']
- super(FederatedTokenTests, self).auth_plugin_config_override(methods)
-
- def setUp(self):
- super(FederatedTokenTests, self).setUp()
- self._notifications = []
-
- def fake_saml_notify(action, context, user_id, group_ids,
- identity_provider, protocol, token_id, outcome):
- note = {
- 'action': action,
- 'user_id': user_id,
- 'identity_provider': identity_provider,
- 'protocol': protocol,
- 'send_notification_called': True}
- self._notifications.append(note)
-
- self.useFixture(mockpatch.PatchObject(
- notifications,
- 'send_saml_audit_notification',
- fake_saml_notify))
-
- def _assert_last_notify(self, action, identity_provider, protocol,
- user_id=None):
- self.assertTrue(self._notifications)
- note = self._notifications[-1]
- if user_id:
- self.assertEqual(note['user_id'], user_id)
- self.assertEqual(note['action'], action)
- self.assertEqual(note['identity_provider'], identity_provider)
- self.assertEqual(note['protocol'], protocol)
- self.assertTrue(note['send_notification_called'])
-
- def load_fixtures(self, fixtures):
- super(FederatedTokenTests, self).load_fixtures(fixtures)
- self.load_federation_sample_data()
-
- def test_issue_unscoped_token_notify(self):
- self._issue_unscoped_token()
- self._assert_last_notify(self.ACTION, self.IDP, self.PROTOCOL)
-
- def test_issue_unscoped_token(self):
- r = self._issue_unscoped_token()
- self.assertIsNotNone(r.headers.get('X-Subject-Token'))
- self.assertValidMappedUser(r.json['token'])
-
- def test_issue_unscoped_token_disabled_idp(self):
- """Checks if authentication works with disabled identity providers.
-
- Test plan:
- 1) Disable default IdP
- 2) Try issuing unscoped token for that IdP
- 3) Expect server to forbid authentication
-
- """
- enabled_false = {'enabled': False}
- self.federation_api.update_idp(self.IDP, enabled_false)
- self.assertRaises(exception.Forbidden,
- self._issue_unscoped_token)
-
- def test_issue_unscoped_token_group_names_in_mapping(self):
- r = self._issue_unscoped_token(assertion='ANOTHER_CUSTOMER_ASSERTION')
- ref_groups = set([self.group_customers['id'], self.group_admins['id']])
- token_resp = r.json_body
- token_groups = token_resp['token']['user']['OS-FEDERATION']['groups']
- token_groups = set([group['id'] for group in token_groups])
- self.assertEqual(ref_groups, token_groups)
-
- def test_issue_unscoped_tokens_nonexisting_group(self):
- self.assertRaises(exception.MissingGroups,
- self._issue_unscoped_token,
- assertion='ANOTHER_TESTER_ASSERTION')
-
- def test_issue_unscoped_token_with_remote_no_attribute(self):
- r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
- environment={
- self.REMOTE_ID_ATTR:
- self.REMOTE_IDS[0]
- })
- self.assertIsNotNone(r.headers.get('X-Subject-Token'))
-
- def test_issue_unscoped_token_with_remote(self):
- self.config_fixture.config(group='federation',
- remote_id_attribute=self.REMOTE_ID_ATTR)
- r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
- environment={
- self.REMOTE_ID_ATTR:
- self.REMOTE_IDS[0]
- })
- self.assertIsNotNone(r.headers.get('X-Subject-Token'))
-
- def test_issue_unscoped_token_with_saml2_remote(self):
- self.config_fixture.config(group='saml2',
- remote_id_attribute=self.REMOTE_ID_ATTR)
- r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
- environment={
- self.REMOTE_ID_ATTR:
- self.REMOTE_IDS[0]
- })
- self.assertIsNotNone(r.headers.get('X-Subject-Token'))
-
- def test_issue_unscoped_token_with_remote_different(self):
- self.config_fixture.config(group='federation',
- remote_id_attribute=self.REMOTE_ID_ATTR)
- self.assertRaises(exception.Forbidden,
- self._issue_unscoped_token,
- idp=self.IDP_WITH_REMOTE,
- environment={
- self.REMOTE_ID_ATTR: uuid.uuid4().hex
- })
-
- def test_issue_unscoped_token_with_remote_default_overwritten(self):
- """Test that protocol remote_id_attribute has higher priority.
-
- Make sure the parameter stored under ``protocol`` section has higher
- priority over parameter from default ``federation`` configuration
- section.
-
- """
- self.config_fixture.config(group='saml2',
- remote_id_attribute=self.REMOTE_ID_ATTR)
- self.config_fixture.config(group='federation',
- remote_id_attribute=uuid.uuid4().hex)
- r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE,
- environment={
- self.REMOTE_ID_ATTR:
- self.REMOTE_IDS[0]
- })
- self.assertIsNotNone(r.headers.get('X-Subject-Token'))
-
- def test_issue_unscoped_token_with_remote_unavailable(self):
- self.config_fixture.config(group='federation',
- remote_id_attribute=self.REMOTE_ID_ATTR)
- self.assertRaises(exception.Unauthorized,
- self._issue_unscoped_token,
- idp=self.IDP_WITH_REMOTE,
- environment={
- uuid.uuid4().hex: uuid.uuid4().hex
- })
-
- def test_issue_unscoped_token_with_remote_user_as_empty_string(self):
- # make sure that REMOTE_USER set as the empty string won't interfere
- r = self._issue_unscoped_token(environment={'REMOTE_USER': ''})
- self.assertIsNotNone(r.headers.get('X-Subject-Token'))
-
- def test_issue_unscoped_token_no_groups(self):
- self.assertRaises(exception.Unauthorized,
- self._issue_unscoped_token,
- assertion='BAD_TESTER_ASSERTION')
-
- def test_issue_unscoped_token_malformed_environment(self):
- """Test whether non string objects are filtered out.
-
- Put non string objects into the environment, inject
- correct assertion and try to get an unscoped token.
- Expect server not to fail on using split() method on
- non string objects and return token id in the HTTP header.
-
- """
- api = auth_controllers.Auth()
- context = {
- 'environment': {
- 'malformed_object': object(),
- 'another_bad_idea': tuple(range(10)),
- 'yet_another_bad_param': dict(zip(uuid.uuid4().hex,
- range(32)))
- }
- }
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION')
- r = api.authenticate_for_token(context, self.UNSCOPED_V3_SAML2_REQ)
- self.assertIsNotNone(r.headers.get('X-Subject-Token'))
-
- def test_scope_to_project_once_notify(self):
- r = self.v3_create_token(
- self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE)
- user_id = r.json['token']['user']['id']
- self._assert_last_notify(self.ACTION, self.IDP, self.PROTOCOL, user_id)
-
- def test_scope_to_project_once(self):
- r = self.v3_create_token(
- self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE)
- token_resp = r.result['token']
- project_id = token_resp['project']['id']
- self._check_project_scoped_token_attributes(token_resp, project_id)
- roles_ref = [self.role_employee]
-
- projects_ref = self.proj_employees
- self._check_projects_and_roles(token_resp, roles_ref, projects_ref)
- self.assertValidMappedUser(token_resp)
-
- def test_scope_token_with_idp_disabled(self):
- """Scope token issued by disabled IdP.
-
- Try scoping the token issued by an IdP which is disabled now. Expect
- server to refuse scoping operation.
-
- This test confirms correct behaviour when IdP was enabled and unscoped
- token was issued, but disabled before user tries to scope the token.
- Here we assume the unscoped token was already issued and start from
- the moment where IdP is being disabled and unscoped token is being
- used.
-
- Test plan:
- 1) Disable IdP
- 2) Try scoping unscoped token
-
- """
- enabled_false = {'enabled': False}
- self.federation_api.update_idp(self.IDP, enabled_false)
- self.v3_create_token(
- self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER,
- expected_status=http_client.FORBIDDEN)
-
- def test_scope_to_bad_project(self):
- """Scope unscoped token with a project we don't have access to."""
- self.v3_create_token(
- self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_scope_to_project_multiple_times(self):
- """Try to scope the unscoped token multiple times.
-
- The new tokens should be scoped to:
-
- * Customers' project
- * Employees' project
-
- """
- bodies = (self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN,
- self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN)
- project_ids = (self.proj_employees['id'],
- self.proj_customers['id'])
- for body, project_id_ref in zip(bodies, project_ids):
- r = self.v3_create_token(body)
- token_resp = r.result['token']
- self._check_project_scoped_token_attributes(token_resp,
- project_id_ref)
-
- def test_scope_to_project_with_only_inherited_roles(self):
- """Try to scope token whose only roles are inherited."""
- self.config_fixture.config(group='os_inherit', enabled=True)
- r = self.v3_create_token(
- self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER)
- token_resp = r.result['token']
- self._check_project_scoped_token_attributes(
- token_resp, self.project_inherited['id'])
- roles_ref = [self.role_customer]
- projects_ref = self.project_inherited
- self._check_projects_and_roles(token_resp, roles_ref, projects_ref)
- self.assertValidMappedUser(token_resp)
-
- def test_scope_token_from_nonexistent_unscoped_token(self):
- """Try to scope token from non-existent unscoped token."""
- self.v3_create_token(
- self.TOKEN_SCOPE_PROJECT_FROM_NONEXISTENT_TOKEN,
- expected_status=http_client.NOT_FOUND)
-
- def test_issue_token_from_rules_without_user(self):
- api = auth_controllers.Auth()
- context = {'environment': {}}
- self._inject_assertion(context, 'BAD_TESTER_ASSERTION')
- self.assertRaises(exception.Unauthorized,
- api.authenticate_for_token,
- context, self.UNSCOPED_V3_SAML2_REQ)
-
- def test_issue_token_with_nonexistent_group(self):
- """Inject assertion that matches rule issuing bad group id.
-
- Expect server to find out that some groups are missing in the
- backend and raise exception.MappedGroupNotFound exception.
-
- """
- self.assertRaises(exception.MappedGroupNotFound,
- self._issue_unscoped_token,
- assertion='CONTRACTOR_ASSERTION')
-
- def test_scope_to_domain_once(self):
- r = self.v3_create_token(self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER)
- token_resp = r.result['token']
- self._check_domain_scoped_token_attributes(token_resp,
- self.domainA['id'])
-
- def test_scope_to_domain_multiple_tokens(self):
- """Issue multiple tokens scoping to different domains.
-
- The new tokens should be scoped to:
-
- * domainA
- * domainB
- * domainC
-
- """
- bodies = (self.TOKEN_SCOPE_DOMAIN_A_FROM_ADMIN,
- self.TOKEN_SCOPE_DOMAIN_B_FROM_ADMIN,
- self.TOKEN_SCOPE_DOMAIN_C_FROM_ADMIN)
- domain_ids = (self.domainA['id'],
- self.domainB['id'],
- self.domainC['id'])
-
- for body, domain_id_ref in zip(bodies, domain_ids):
- r = self.v3_create_token(body)
- token_resp = r.result['token']
- self._check_domain_scoped_token_attributes(token_resp,
- domain_id_ref)
-
- def test_scope_to_domain_with_only_inherited_roles_fails(self):
- """Try to scope to a domain that has no direct roles."""
- self.v3_create_token(
- self.TOKEN_SCOPE_DOMAIN_D_FROM_CUSTOMER,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_list_projects(self):
- urls = ('/OS-FEDERATION/projects', '/auth/projects')
-
- token = (self.tokens['CUSTOMER_ASSERTION'],
- self.tokens['EMPLOYEE_ASSERTION'],
- self.tokens['ADMIN_ASSERTION'])
-
- self.config_fixture.config(group='os_inherit', enabled=True)
- projects_refs = (set([self.proj_customers['id'],
- self.project_inherited['id']]),
- set([self.proj_employees['id'],
- self.project_all['id']]),
- set([self.proj_employees['id'],
- self.project_all['id'],
- self.proj_customers['id'],
- self.project_inherited['id']]))
-
- for token, projects_ref in zip(token, projects_refs):
- for url in urls:
- r = self.get(url, token=token)
- projects_resp = r.result['projects']
- projects = set(p['id'] for p in projects_resp)
- self.assertEqual(projects_ref, projects,
- 'match failed for url %s' % url)
-
- # TODO(samueldmq): Create another test class for role inheritance tests.
- # The advantage would be to reduce the complexity of this test class and
- # have tests specific to this functionality grouped, easing readability and
- # maintenability.
- def test_list_projects_for_inherited_project_assignment(self):
- # Enable os_inherit extension
- self.config_fixture.config(group='os_inherit', enabled=True)
-
- # Create a subproject
- subproject_inherited = unit.new_project_ref(
- domain_id=self.domainD['id'],
- parent_id=self.project_inherited['id'])
- self.resource_api.create_project(subproject_inherited['id'],
- subproject_inherited)
-
- # Create an inherited role assignment
- self.assignment_api.create_grant(
- role_id=self.role_employee['id'],
- group_id=self.group_employees['id'],
- project_id=self.project_inherited['id'],
- inherited_to_projects=True)
-
- # Define expected projects from employee assertion, which contain
- # the created subproject
- expected_project_ids = [self.project_all['id'],
- self.proj_employees['id'],
- subproject_inherited['id']]
-
- # Assert expected projects for both available URLs
- for url in ('/OS-FEDERATION/projects', '/auth/projects'):
- r = self.get(url, token=self.tokens['EMPLOYEE_ASSERTION'])
- project_ids = [project['id'] for project in r.result['projects']]
-
- self.assertEqual(len(expected_project_ids), len(project_ids))
- for expected_project_id in expected_project_ids:
- self.assertIn(expected_project_id, project_ids,
- 'Projects match failed for url %s' % url)
-
- def test_list_domains(self):
- urls = ('/OS-FEDERATION/domains', '/auth/domains')
-
- tokens = (self.tokens['CUSTOMER_ASSERTION'],
- self.tokens['EMPLOYEE_ASSERTION'],
- self.tokens['ADMIN_ASSERTION'])
-
- # NOTE(henry-nash): domain D does not appear in the expected results
- # since it only had inherited roles (which only apply to projects
- # within the domain)
-
- domain_refs = (set([self.domainA['id']]),
- set([self.domainA['id'],
- self.domainB['id']]),
- set([self.domainA['id'],
- self.domainB['id'],
- self.domainC['id']]))
-
- for token, domains_ref in zip(tokens, domain_refs):
- for url in urls:
- r = self.get(url, token=token)
- domains_resp = r.result['domains']
- domains = set(p['id'] for p in domains_resp)
- self.assertEqual(domains_ref, domains,
- 'match failed for url %s' % url)
-
- @utils.wip('This will fail because of bug #1501032. The returned method'
- 'list should contain "saml2". This is documented in bug '
- '1501032.')
- def test_full_workflow(self):
- """Test 'standard' workflow for granting access tokens.
-
- * Issue unscoped token
- * List available projects based on groups
- * Scope token to one of available projects
-
- """
- r = self._issue_unscoped_token()
- token_resp = r.json_body['token']
- # NOTE(lbragstad): Ensure only 'saml2' is in the method list.
- self.assertListEqual(['saml2'], token_resp['methods'])
- self.assertValidMappedUser(token_resp)
- employee_unscoped_token_id = r.headers.get('X-Subject-Token')
- r = self.get('/auth/projects', token=employee_unscoped_token_id)
- projects = r.result['projects']
- random_project = random.randint(0, len(projects)) - 1
- project = projects[random_project]
-
- v3_scope_request = self._scope_request(employee_unscoped_token_id,
- 'project', project['id'])
-
- r = self.v3_create_token(v3_scope_request)
- token_resp = r.result['token']
- # FIXME(lbragstad): 'token' should be in the list of methods returned
- # but it isn't. This is documented in bug 1501032.
- self.assertIn('token', token_resp['methods'])
- self.assertIn('saml2', token_resp['methods'])
- self._check_project_scoped_token_attributes(token_resp, project['id'])
-
- def test_workflow_with_groups_deletion(self):
- """Test full workflow with groups deletion before token scoping.
-
- The test scenario is as follows:
- - Create group ``group``
- - Create and assign roles to ``group`` and ``project_all``
- - Patch mapping rules for existing IdP so it issues group id
- - Issue unscoped token with ``group``'s id
- - Delete group ``group``
- - Scope token to ``project_all``
- - Expect HTTP 500 response
-
- """
- # create group and role
- group = unit.new_group_ref(domain_id=self.domainA['id'])
- group = self.identity_api.create_group(group)
- role = unit.new_role_ref()
- self.role_api.create_role(role['id'], role)
-
- # assign role to group and project_admins
- self.assignment_api.create_grant(role['id'],
- group_id=group['id'],
- project_id=self.project_all['id'])
-
- rules = {
- 'rules': [
- {
- 'local': [
- {
- 'group': {
- 'id': group['id']
- }
- },
- {
- 'user': {
- 'name': '{0}'
- }
- }
- ],
- 'remote': [
- {
- 'type': 'UserName'
- },
- {
- 'type': 'LastName',
- 'any_one_of': [
- 'Account'
- ]
- }
- ]
- }
- ]
- }
-
- self.federation_api.update_mapping(self.mapping['id'], rules)
-
- r = self._issue_unscoped_token(assertion='TESTER_ASSERTION')
- token_id = r.headers.get('X-Subject-Token')
-
- # delete group
- self.identity_api.delete_group(group['id'])
-
- # scope token to project_all, expect HTTP 500
- scoped_token = self._scope_request(
- token_id, 'project',
- self.project_all['id'])
-
- self.v3_create_token(
- scoped_token, expected_status=http_client.INTERNAL_SERVER_ERROR)
-
- def test_lists_with_missing_group_in_backend(self):
- """Test a mapping that points to a group that does not exist
-
- For explicit mappings, we expect the group to exist in the backend,
- but for lists, specifically blacklists, a missing group is expected
- as many groups will be specified by the IdP that are not Keystone
- groups.
-
- The test scenario is as follows:
- - Create group ``EXISTS``
- - Set mapping rules for existing IdP with a blacklist
- that passes through as REMOTE_USER_GROUPS
- - Issue unscoped token with on group ``EXISTS`` id in it
-
- """
- domain_id = self.domainA['id']
- domain_name = self.domainA['name']
- group = unit.new_group_ref(domain_id=domain_id, name='EXISTS')
- group = self.identity_api.create_group(group)
- rules = {
- 'rules': [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "id": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER"
- }
- ]
- },
- {
- "local": [
- {
- "groups": "{0}",
- "domain": {"name": domain_name}
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER_GROUPS",
- }
- ]
- }
- ]
- }
- self.federation_api.update_mapping(self.mapping['id'], rules)
-
- def test_empty_blacklist_passess_all_values(self):
- """Test a mapping with empty blacklist specified
-
- Not adding a ``blacklist`` keyword to the mapping rules has the same
- effect as adding an empty ``blacklist``.
- In both cases, the mapping engine will not discard any groups that are
- associated with apache environment variables.
-
- This test checks scenario where an empty blacklist was specified.
- Expected result is to allow any value.
-
- The test scenario is as follows:
- - Create group ``EXISTS``
- - Create group ``NO_EXISTS``
- - Set mapping rules for existing IdP with a blacklist
- that passes through as REMOTE_USER_GROUPS
- - Issue unscoped token with groups ``EXISTS`` and ``NO_EXISTS``
- assigned
-
- """
- domain_id = self.domainA['id']
- domain_name = self.domainA['name']
-
- # Add a group "EXISTS"
- group_exists = unit.new_group_ref(domain_id=domain_id, name='EXISTS')
- group_exists = self.identity_api.create_group(group_exists)
-
- # Add a group "NO_EXISTS"
- group_no_exists = unit.new_group_ref(domain_id=domain_id,
- name='NO_EXISTS')
- group_no_exists = self.identity_api.create_group(group_no_exists)
-
- group_ids = set([group_exists['id'], group_no_exists['id']])
-
- rules = {
- 'rules': [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "id": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER"
- }
- ]
- },
- {
- "local": [
- {
- "groups": "{0}",
- "domain": {"name": domain_name}
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER_GROUPS",
- "blacklist": []
- }
- ]
- }
- ]
- }
- self.federation_api.update_mapping(self.mapping['id'], rules)
- r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION')
- assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups']
- self.assertEqual(len(group_ids), len(assigned_group_ids))
- for group in assigned_group_ids:
- self.assertIn(group['id'], group_ids)
-
- def test_not_adding_blacklist_passess_all_values(self):
- """Test a mapping without blacklist specified.
-
- Not adding a ``blacklist`` keyword to the mapping rules has the same
- effect as adding an empty ``blacklist``. In both cases all values will
- be accepted and passed.
-
- This test checks scenario where an blacklist was not specified.
- Expected result is to allow any value.
-
- The test scenario is as follows:
- - Create group ``EXISTS``
- - Create group ``NO_EXISTS``
- - Set mapping rules for existing IdP with a blacklist
- that passes through as REMOTE_USER_GROUPS
- - Issue unscoped token with on groups ``EXISTS`` and ``NO_EXISTS``
- assigned
-
- """
- domain_id = self.domainA['id']
- domain_name = self.domainA['name']
-
- # Add a group "EXISTS"
- group_exists = unit.new_group_ref(domain_id=domain_id,
- name='EXISTS')
- group_exists = self.identity_api.create_group(group_exists)
-
- # Add a group "NO_EXISTS"
- group_no_exists = unit.new_group_ref(domain_id=domain_id,
- name='NO_EXISTS')
- group_no_exists = self.identity_api.create_group(group_no_exists)
-
- group_ids = set([group_exists['id'], group_no_exists['id']])
-
- rules = {
- 'rules': [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "id": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER"
- }
- ]
- },
- {
- "local": [
- {
- "groups": "{0}",
- "domain": {"name": domain_name}
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER_GROUPS",
- }
- ]
- }
- ]
- }
- self.federation_api.update_mapping(self.mapping['id'], rules)
- r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION')
- assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups']
- self.assertEqual(len(group_ids), len(assigned_group_ids))
- for group in assigned_group_ids:
- self.assertIn(group['id'], group_ids)
-
- def test_empty_whitelist_discards_all_values(self):
- """Test that empty whitelist blocks all the values
-
- Not adding a ``whitelist`` keyword to the mapping value is different
- than adding empty whitelist. The former case will simply pass all the
- values, whereas the latter would discard all the values.
-
- This test checks scenario where an empty whitelist was specified.
- The expected result is that no groups are matched.
-
- The test scenario is as follows:
- - Create group ``EXISTS``
- - Set mapping rules for existing IdP with an empty whitelist
- that whould discard any values from the assertion
- - Try issuing unscoped token, expect server to raise
- ``exception.MissingGroups`` as no groups were matched and ephemeral
- user does not have any group assigned.
-
- """
- domain_id = self.domainA['id']
- domain_name = self.domainA['name']
- group = unit.new_group_ref(domain_id=domain_id, name='EXISTS')
- group = self.identity_api.create_group(group)
- rules = {
- 'rules': [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "id": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER"
- }
- ]
- },
- {
- "local": [
- {
- "groups": "{0}",
- "domain": {"name": domain_name}
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER_GROUPS",
- "whitelist": []
- }
- ]
- }
- ]
- }
- self.federation_api.update_mapping(self.mapping['id'], rules)
-
- self.assertRaises(exception.MissingGroups,
- self._issue_unscoped_token,
- assertion='UNMATCHED_GROUP_ASSERTION')
-
- def test_not_setting_whitelist_accepts_all_values(self):
- """Test that not setting whitelist passes
-
- Not adding a ``whitelist`` keyword to the mapping value is different
- than adding empty whitelist. The former case will simply pass all the
- values, whereas the latter would discard all the values.
-
- This test checks a scenario where a ``whitelist`` was not specified.
- Expected result is that no groups are ignored.
-
- The test scenario is as follows:
- - Create group ``EXISTS``
- - Set mapping rules for existing IdP with an empty whitelist
- that whould discard any values from the assertion
- - Issue an unscoped token and make sure ephemeral user is a member of
- two groups.
-
- """
- domain_id = self.domainA['id']
- domain_name = self.domainA['name']
-
- # Add a group "EXISTS"
- group_exists = unit.new_group_ref(domain_id=domain_id,
- name='EXISTS')
- group_exists = self.identity_api.create_group(group_exists)
-
- # Add a group "NO_EXISTS"
- group_no_exists = unit.new_group_ref(domain_id=domain_id,
- name='NO_EXISTS')
- group_no_exists = self.identity_api.create_group(group_no_exists)
-
- group_ids = set([group_exists['id'], group_no_exists['id']])
-
- rules = {
- 'rules': [
- {
- "local": [
- {
- "user": {
- "name": "{0}",
- "id": "{0}"
- }
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER"
- }
- ]
- },
- {
- "local": [
- {
- "groups": "{0}",
- "domain": {"name": domain_name}
- }
- ],
- "remote": [
- {
- "type": "REMOTE_USER_GROUPS",
- }
- ]
- }
- ]
- }
- self.federation_api.update_mapping(self.mapping['id'], rules)
- r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION')
- assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups']
- self.assertEqual(len(group_ids), len(assigned_group_ids))
- for group in assigned_group_ids:
- self.assertIn(group['id'], group_ids)
-
- def test_assertion_prefix_parameter(self):
- """Test parameters filtering based on the prefix.
-
- With ``assertion_prefix`` set to fixed, non default value,
- issue an unscoped token from assertion EMPLOYEE_ASSERTION_PREFIXED.
- Expect server to return unscoped token.
-
- """
- self.config_fixture.config(group='federation',
- assertion_prefix=self.ASSERTION_PREFIX)
- r = self._issue_unscoped_token(assertion='EMPLOYEE_ASSERTION_PREFIXED')
- self.assertIsNotNone(r.headers.get('X-Subject-Token'))
-
- def test_assertion_prefix_parameter_expect_fail(self):
- """Test parameters filtering based on the prefix.
-
- With ``assertion_prefix`` default value set to empty string
- issue an unscoped token from assertion EMPLOYEE_ASSERTION.
- Next, configure ``assertion_prefix`` to value ``UserName``.
- Try issuing unscoped token with EMPLOYEE_ASSERTION.
- Expect server to raise exception.Unathorized exception.
-
- """
- r = self._issue_unscoped_token()
- self.assertIsNotNone(r.headers.get('X-Subject-Token'))
- self.config_fixture.config(group='federation',
- assertion_prefix='UserName')
-
- self.assertRaises(exception.Unauthorized,
- self._issue_unscoped_token)
-
- def test_v2_auth_with_federation_token_fails(self):
- """Test that using a federation token with v2 auth fails.
-
- If an admin sets up a federated Keystone environment, and a user
- incorrectly configures a service (like Nova) to only use v2 auth, the
- returned message should be informative.
-
- """
- r = self._issue_unscoped_token()
- token_id = r.headers.get('X-Subject-Token')
- self.assertRaises(exception.Unauthorized,
- self.token_provider_api.validate_v2_token,
- token_id=token_id)
-
- def test_unscoped_token_has_user_domain(self):
- r = self._issue_unscoped_token()
- self._check_domains_are_valid(r.json_body['token'])
-
- def test_scoped_token_has_user_domain(self):
- r = self.v3_create_token(
- self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE)
- self._check_domains_are_valid(r.result['token'])
-
- def test_issue_unscoped_token_for_local_user(self):
- r = self._issue_unscoped_token(assertion='LOCAL_USER_ASSERTION')
- token_resp = r.json_body['token']
- self.assertListEqual(['saml2'], token_resp['methods'])
- self.assertEqual(self.user['id'], token_resp['user']['id'])
- self.assertEqual(self.user['name'], token_resp['user']['name'])
- self.assertEqual(self.domain['id'], token_resp['user']['domain']['id'])
- # Make sure the token is not scoped
- self.assertNotIn('project', token_resp)
- self.assertNotIn('domain', token_resp)
-
- def test_issue_token_for_local_user_user_not_found(self):
- self.assertRaises(exception.Unauthorized,
- self._issue_unscoped_token,
- assertion='ANOTHER_LOCAL_USER_ASSERTION')
-
-
-class FernetFederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin):
- AUTH_METHOD = 'token'
-
- def load_fixtures(self, fixtures):
- super(FernetFederatedTokenTests, self).load_fixtures(fixtures)
- self.load_federation_sample_data()
-
- def config_overrides(self):
- super(FernetFederatedTokenTests, self).config_overrides()
- self.config_fixture.config(group='token', provider='fernet')
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
- def auth_plugin_config_override(self):
- methods = ['saml2', 'token', 'password']
- super(FernetFederatedTokenTests,
- self).auth_plugin_config_override(methods)
-
- def test_federated_unscoped_token(self):
- resp = self._issue_unscoped_token()
- self.assertEqual(204, len(resp.headers['X-Subject-Token']))
- self.assertValidMappedUser(resp.json_body['token'])
-
- def test_federated_unscoped_token_with_multiple_groups(self):
- assertion = 'ANOTHER_CUSTOMER_ASSERTION'
- resp = self._issue_unscoped_token(assertion=assertion)
- self.assertEqual(226, len(resp.headers['X-Subject-Token']))
- self.assertValidMappedUser(resp.json_body['token'])
-
- def test_validate_federated_unscoped_token(self):
- resp = self._issue_unscoped_token()
- unscoped_token = resp.headers.get('X-Subject-Token')
- # assert that the token we received is valid
- self.get('/auth/tokens/', headers={'X-Subject-Token': unscoped_token})
-
- def test_fernet_full_workflow(self):
- """Test 'standard' workflow for granting Fernet access tokens.
-
- * Issue unscoped token
- * List available projects based on groups
- * Scope token to one of available projects
-
- """
- resp = self._issue_unscoped_token()
- self.assertValidMappedUser(resp.json_body['token'])
- unscoped_token = resp.headers.get('X-Subject-Token')
- resp = self.get('/auth/projects', token=unscoped_token)
- projects = resp.result['projects']
- random_project = random.randint(0, len(projects)) - 1
- project = projects[random_project]
-
- v3_scope_request = self._scope_request(unscoped_token,
- 'project', project['id'])
-
- resp = self.v3_create_token(v3_scope_request)
- token_resp = resp.result['token']
- self._check_project_scoped_token_attributes(token_resp, project['id'])
-
-
-class FederatedTokenTestsMethodToken(FederatedTokenTests):
- """Test federation operation with unified scoping auth method.
-
- Test all the operations with auth method set to ``token`` as a new, unified
- way for scoping all the tokens.
-
- """
-
- AUTH_METHOD = 'token'
-
- def auth_plugin_config_override(self):
- methods = ['saml2', 'token']
- super(FederatedTokenTests,
- self).auth_plugin_config_override(methods)
-
- @utils.wip('This will fail because of bug #1501032. The returned method'
- 'list should contain "saml2". This is documented in bug '
- '1501032.')
- def test_full_workflow(self):
- """Test 'standard' workflow for granting access tokens.
-
- * Issue unscoped token
- * List available projects based on groups
- * Scope token to one of available projects
-
- """
- r = self._issue_unscoped_token()
- token_resp = r.json_body['token']
- # NOTE(lbragstad): Ensure only 'saml2' is in the method list.
- self.assertListEqual(['saml2'], token_resp['methods'])
- self.assertValidMappedUser(token_resp)
- employee_unscoped_token_id = r.headers.get('X-Subject-Token')
- r = self.get('/auth/projects', token=employee_unscoped_token_id)
- projects = r.result['projects']
- random_project = random.randint(0, len(projects)) - 1
- project = projects[random_project]
-
- v3_scope_request = self._scope_request(employee_unscoped_token_id,
- 'project', project['id'])
-
- r = self.v3_authenticate_token(v3_scope_request)
- token_resp = r.result['token']
- self.assertIn('token', token_resp['methods'])
- self.assertIn('saml2', token_resp['methods'])
- self._check_project_scoped_token_attributes(token_resp, project['id'])
-
-
-class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin):
- """Tests for federated users
-
- Tests new shadow users functionality
-
- """
-
- def auth_plugin_config_override(self):
- methods = ['saml2']
- super(FederatedUserTests, self).auth_plugin_config_override(methods)
-
- def setUp(self):
- super(FederatedUserTests, self).setUp()
-
- def load_fixtures(self, fixtures):
- super(FederatedUserTests, self).load_fixtures(fixtures)
- self.load_federation_sample_data()
-
- def test_user_id_persistense(self):
- """Ensure user_id is persistend for multiple federated authn calls."""
- r = self._issue_unscoped_token()
- user_id = r.json_body['token']['user']['id']
-
- r = self._issue_unscoped_token()
- user_id2 = r.json_body['token']['user']['id']
- self.assertEqual(user_id, user_id2)
-
-
-class JsonHomeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
- JSON_HOME_DATA = {
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-FEDERATION/'
- '1.0/rel/identity_provider': {
- 'href-template': '/OS-FEDERATION/identity_providers/{idp_id}',
- 'href-vars': {
- 'idp_id': 'http://docs.openstack.org/api/openstack-identity/3/'
- 'ext/OS-FEDERATION/1.0/param/idp_id'
- },
- },
- }
-
-
-def _is_xmlsec1_installed():
- p = subprocess.Popen(
- ['which', 'xmlsec1'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
-
- # invert the return code
- return not bool(p.wait())
-
-
-def _load_xml(filename):
- with open(os.path.join(XMLDIR, filename), 'r') as xml:
- return xml.read()
-
-
-class SAMLGenerationTests(test_v3.RestfulTestCase):
-
- SP_AUTH_URL = ('http://beta.com:5000/v3/OS-FEDERATION/identity_providers'
- '/BETA/protocols/saml2/auth')
-
- ASSERTION_FILE = 'signed_saml2_assertion.xml'
-
- # The values of the following variables match the attributes values found
- # in ASSERTION_FILE
- ISSUER = 'https://acme.com/FIM/sps/openstack/saml20'
- RECIPIENT = 'http://beta.com/Shibboleth.sso/SAML2/POST'
- SUBJECT = 'test_user'
- SUBJECT_DOMAIN = 'user_domain'
- ROLES = ['admin', 'member']
- PROJECT = 'development'
- PROJECT_DOMAIN = 'project_domain'
- SAML_GENERATION_ROUTE = '/auth/OS-FEDERATION/saml2'
- ECP_GENERATION_ROUTE = '/auth/OS-FEDERATION/saml2/ecp'
- ASSERTION_VERSION = "2.0"
- SERVICE_PROVDIER_ID = 'ACME'
-
- def sp_ref(self):
- ref = {
- 'auth_url': self.SP_AUTH_URL,
- 'enabled': True,
- 'description': uuid.uuid4().hex,
- 'sp_url': self.RECIPIENT,
- 'relay_state_prefix': CONF.saml.relay_state_prefix,
-
- }
- return ref
-
- def setUp(self):
- super(SAMLGenerationTests, self).setUp()
- self.signed_assertion = saml2.create_class_from_xml_string(
- saml.Assertion, _load_xml(self.ASSERTION_FILE))
- self.sp = self.sp_ref()
- url = '/OS-FEDERATION/service_providers/' + self.SERVICE_PROVDIER_ID
- self.put(url, body={'service_provider': self.sp},
- expected_status=http_client.CREATED)
-
- def test_samlize_token_values(self):
- """Test the SAML generator produces a SAML object.
-
- Test the SAML generator directly by passing known arguments, the result
- should be a SAML object that consistently includes attributes based on
- the known arguments that were passed in.
-
- """
- with mock.patch.object(keystone_idp, '_sign_assertion',
- return_value=self.signed_assertion):
- generator = keystone_idp.SAMLGenerator()
- response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT,
- self.SUBJECT_DOMAIN,
- self.ROLES, self.PROJECT,
- self.PROJECT_DOMAIN)
-
- assertion = response.assertion
- self.assertIsNotNone(assertion)
- self.assertIsInstance(assertion, saml.Assertion)
- issuer = response.issuer
- self.assertEqual(self.RECIPIENT, response.destination)
- self.assertEqual(self.ISSUER, issuer.text)
-
- user_attribute = assertion.attribute_statement[0].attribute[0]
- self.assertEqual(self.SUBJECT, user_attribute.attribute_value[0].text)
-
- user_domain_attribute = (
- assertion.attribute_statement[0].attribute[1])
- self.assertEqual(self.SUBJECT_DOMAIN,
- user_domain_attribute.attribute_value[0].text)
-
- role_attribute = assertion.attribute_statement[0].attribute[2]
- for attribute_value in role_attribute.attribute_value:
- self.assertIn(attribute_value.text, self.ROLES)
-
- project_attribute = assertion.attribute_statement[0].attribute[3]
- self.assertEqual(self.PROJECT,
- project_attribute.attribute_value[0].text)
-
- project_domain_attribute = (
- assertion.attribute_statement[0].attribute[4])
- self.assertEqual(self.PROJECT_DOMAIN,
- project_domain_attribute.attribute_value[0].text)
-
- def test_verify_assertion_object(self):
- """Test that the Assertion object is built properly.
-
- The Assertion doesn't need to be signed in this test, so
- _sign_assertion method is patched and doesn't alter the assertion.
-
- """
- with mock.patch.object(keystone_idp, '_sign_assertion',
- side_effect=lambda x: x):
- generator = keystone_idp.SAMLGenerator()
- response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT,
- self.SUBJECT_DOMAIN,
- self.ROLES, self.PROJECT,
- self.PROJECT_DOMAIN)
- assertion = response.assertion
- self.assertEqual(self.ASSERTION_VERSION, assertion.version)
-
- def test_valid_saml_xml(self):
- """Test the generated SAML object can become valid XML.
-
- Test the generator directly by passing known arguments, the result
- should be a SAML object that consistently includes attributes based on
- the known arguments that were passed in.
-
- """
- with mock.patch.object(keystone_idp, '_sign_assertion',
- return_value=self.signed_assertion):
- generator = keystone_idp.SAMLGenerator()
- response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT,
- self.SUBJECT_DOMAIN,
- self.ROLES, self.PROJECT,
- self.PROJECT_DOMAIN)
-
- saml_str = response.to_string()
- response = etree.fromstring(saml_str)
- issuer = response[0]
- assertion = response[2]
-
- self.assertEqual(self.RECIPIENT, response.get('Destination'))
- self.assertEqual(self.ISSUER, issuer.text)
-
- user_attribute = assertion[4][0]
- self.assertEqual(self.SUBJECT, user_attribute[0].text)
-
- user_domain_attribute = assertion[4][1]
- self.assertEqual(self.SUBJECT_DOMAIN, user_domain_attribute[0].text)
-
- role_attribute = assertion[4][2]
- for attribute_value in role_attribute:
- self.assertIn(attribute_value.text, self.ROLES)
-
- project_attribute = assertion[4][3]
- self.assertEqual(self.PROJECT, project_attribute[0].text)
-
- project_domain_attribute = assertion[4][4]
- self.assertEqual(self.PROJECT_DOMAIN, project_domain_attribute[0].text)
-
- def test_assertion_using_explicit_namespace_prefixes(self):
- def mocked_subprocess_check_output(*popenargs, **kwargs):
- # the last option is the assertion file to be signed
- filename = popenargs[0][-1]
- with open(filename, 'r') as f:
- assertion_content = f.read()
- # since we are not testing the signature itself, we can return
- # the assertion as is without signing it
- return assertion_content
-
- with mock.patch.object(subprocess, 'check_output',
- side_effect=mocked_subprocess_check_output):
- generator = keystone_idp.SAMLGenerator()
- response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT,
- self.SUBJECT_DOMAIN,
- self.ROLES, self.PROJECT,
- self.PROJECT_DOMAIN)
- assertion_xml = response.assertion.to_string()
- # make sure we have the proper tag and prefix for the assertion
- # namespace
- self.assertIn('<saml:Assertion', assertion_xml)
- self.assertIn('xmlns:saml="' + saml2.NAMESPACE + '"',
- assertion_xml)
- self.assertIn('xmlns:xmldsig="' + xmldsig.NAMESPACE + '"',
- assertion_xml)
-
- def test_saml_signing(self):
- """Test that the SAML generator produces a SAML object.
-
- Test the SAML generator directly by passing known arguments, the result
- should be a SAML object that consistently includes attributes based on
- the known arguments that were passed in.
-
- """
- if not _is_xmlsec1_installed():
- self.skipTest('xmlsec1 is not installed')
-
- generator = keystone_idp.SAMLGenerator()
- response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
- self.SUBJECT, self.SUBJECT_DOMAIN,
- self.ROLES, self.PROJECT,
- self.PROJECT_DOMAIN)
-
- signature = response.assertion.signature
- self.assertIsNotNone(signature)
- self.assertIsInstance(signature, xmldsig.Signature)
-
- idp_public_key = sigver.read_cert_from_file(CONF.saml.certfile, 'pem')
- cert_text = signature.key_info.x509_data[0].x509_certificate.text
- # NOTE(stevemar): Rather than one line of text, the certificate is
- # printed with newlines for readability, we remove these so we can
- # match it with the key that we used.
- cert_text = cert_text.replace(os.linesep, '')
- self.assertEqual(idp_public_key, cert_text)
-
- def _create_generate_saml_request(self, token_id, sp_id):
- return {
- "auth": {
- "identity": {
- "methods": [
- "token"
- ],
- "token": {
- "id": token_id
- }
- },
- "scope": {
- "service_provider": {
- "id": sp_id
- }
- }
- }
- }
-
- def _fetch_valid_token(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- resp = self.v3_create_token(auth_data)
- token_id = resp.headers.get('X-Subject-Token')
- return token_id
-
- def _fetch_domain_scoped_token(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- user_domain_id=self.domain['id'])
- resp = self.v3_create_token(auth_data)
- token_id = resp.headers.get('X-Subject-Token')
- return token_id
-
- def test_not_project_scoped_token(self):
- """Ensure SAML generation fails when passing domain-scoped tokens.
-
- The server should return a 403 Forbidden Action.
-
- """
- self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
- token_id = self._fetch_domain_scoped_token()
- body = self._create_generate_saml_request(token_id,
- self.SERVICE_PROVDIER_ID)
- with mock.patch.object(keystone_idp, '_sign_assertion',
- return_value=self.signed_assertion):
- self.post(self.SAML_GENERATION_ROUTE, body=body,
- expected_status=http_client.FORBIDDEN)
-
- def test_generate_saml_route(self):
- """Test that the SAML generation endpoint produces XML.
-
- The SAML endpoint /v3/auth/OS-FEDERATION/saml2 should take as input,
- a scoped token ID, and a Service Provider ID.
- The controller should fetch details about the user from the token,
- and details about the service provider from its ID.
- This should be enough information to invoke the SAML generator and
- provide a valid SAML (XML) document back.
-
- """
- self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
- token_id = self._fetch_valid_token()
- body = self._create_generate_saml_request(token_id,
- self.SERVICE_PROVDIER_ID)
-
- with mock.patch.object(keystone_idp, '_sign_assertion',
- return_value=self.signed_assertion):
- http_response = self.post(self.SAML_GENERATION_ROUTE, body=body,
- response_content_type='text/xml',
- expected_status=http_client.OK)
-
- response = etree.fromstring(http_response.result)
- issuer = response[0]
- assertion = response[2]
-
- self.assertEqual(self.RECIPIENT, response.get('Destination'))
- self.assertEqual(self.ISSUER, issuer.text)
-
- # NOTE(stevemar): We should test this against expected values,
- # but the self.xyz attribute names are uuids, and we mock out
- # the result. Ideally we should update the mocked result with
- # some known data, and create the roles/project/user before
- # these tests run.
- user_attribute = assertion[4][0]
- self.assertIsInstance(user_attribute[0].text, str)
-
- user_domain_attribute = assertion[4][1]
- self.assertIsInstance(user_domain_attribute[0].text, str)
-
- role_attribute = assertion[4][2]
- self.assertIsInstance(role_attribute[0].text, str)
-
- project_attribute = assertion[4][3]
- self.assertIsInstance(project_attribute[0].text, str)
-
- project_domain_attribute = assertion[4][4]
- self.assertIsInstance(project_domain_attribute[0].text, str)
-
- def test_invalid_scope_body(self):
- """Test that missing the scope in request body raises an exception.
-
- Raises exception.SchemaValidationError() - error 400 Bad Request
-
- """
- token_id = uuid.uuid4().hex
- body = self._create_generate_saml_request(token_id,
- self.SERVICE_PROVDIER_ID)
- del body['auth']['scope']
-
- self.post(self.SAML_GENERATION_ROUTE, body=body,
- expected_status=http_client.BAD_REQUEST)
-
- def test_invalid_token_body(self):
- """Test that missing the token in request body raises an exception.
-
- Raises exception.SchemaValidationError() - error 400 Bad Request
-
- """
- token_id = uuid.uuid4().hex
- body = self._create_generate_saml_request(token_id,
- self.SERVICE_PROVDIER_ID)
- del body['auth']['identity']['token']
-
- self.post(self.SAML_GENERATION_ROUTE, body=body,
- expected_status=http_client.BAD_REQUEST)
-
- def test_sp_not_found(self):
- """Test SAML generation with an invalid service provider ID.
-
- Raises exception.ServiceProviderNotFound() - error Not Found 404
-
- """
- sp_id = uuid.uuid4().hex
- token_id = self._fetch_valid_token()
- body = self._create_generate_saml_request(token_id, sp_id)
- self.post(self.SAML_GENERATION_ROUTE, body=body,
- expected_status=http_client.NOT_FOUND)
-
- def test_sp_disabled(self):
- """Try generating assertion for disabled Service Provider."""
- # Disable Service Provider
- sp_ref = {'enabled': False}
- self.federation_api.update_sp(self.SERVICE_PROVDIER_ID, sp_ref)
-
- token_id = self._fetch_valid_token()
- body = self._create_generate_saml_request(token_id,
- self.SERVICE_PROVDIER_ID)
- self.post(self.SAML_GENERATION_ROUTE, body=body,
- expected_status=http_client.FORBIDDEN)
-
- def test_token_not_found(self):
- """Test that an invalid token in the request body raises an exception.
-
- Raises exception.TokenNotFound() - error Not Found 404
-
- """
- token_id = uuid.uuid4().hex
- body = self._create_generate_saml_request(token_id,
- self.SERVICE_PROVDIER_ID)
- self.post(self.SAML_GENERATION_ROUTE, body=body,
- expected_status=http_client.NOT_FOUND)
-
- def test_generate_ecp_route(self):
- """Test that the ECP generation endpoint produces XML.
-
- The ECP endpoint /v3/auth/OS-FEDERATION/saml2/ecp should take the same
- input as the SAML generation endpoint (scoped token ID + Service
- Provider ID).
- The controller should return a SAML assertion that is wrapped in a
- SOAP envelope.
- """
- self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
- token_id = self._fetch_valid_token()
- body = self._create_generate_saml_request(token_id,
- self.SERVICE_PROVDIER_ID)
-
- with mock.patch.object(keystone_idp, '_sign_assertion',
- return_value=self.signed_assertion):
- http_response = self.post(self.ECP_GENERATION_ROUTE, body=body,
- response_content_type='text/xml',
- expected_status=http_client.OK)
-
- env_response = etree.fromstring(http_response.result)
- header = env_response[0]
-
- # Verify the relay state starts with 'ss:mem'
- prefix = CONF.saml.relay_state_prefix
- self.assertThat(header[0].text, matchers.StartsWith(prefix))
-
- # Verify that the content in the body matches the expected assertion
- body = env_response[1]
- response = body[0]
- issuer = response[0]
- assertion = response[2]
-
- self.assertEqual(self.RECIPIENT, response.get('Destination'))
- self.assertEqual(self.ISSUER, issuer.text)
-
- user_attribute = assertion[4][0]
- self.assertIsInstance(user_attribute[0].text, str)
-
- user_domain_attribute = assertion[4][1]
- self.assertIsInstance(user_domain_attribute[0].text, str)
-
- role_attribute = assertion[4][2]
- self.assertIsInstance(role_attribute[0].text, str)
-
- project_attribute = assertion[4][3]
- self.assertIsInstance(project_attribute[0].text, str)
-
- project_domain_attribute = assertion[4][4]
- self.assertIsInstance(project_domain_attribute[0].text, str)
-
- @mock.patch('saml2.create_class_from_xml_string')
- @mock.patch('oslo_utils.fileutils.write_to_tempfile')
- @mock.patch.object(subprocess, 'check_output')
- def test__sign_assertion(self, check_output_mock,
- write_to_tempfile_mock, create_class_mock):
- write_to_tempfile_mock.return_value = 'tmp_path'
- check_output_mock.return_value = 'fakeoutput'
-
- keystone_idp._sign_assertion(self.signed_assertion)
-
- create_class_mock.assert_called_with(saml.Assertion, 'fakeoutput')
-
- @mock.patch('oslo_utils.fileutils.write_to_tempfile')
- @mock.patch.object(subprocess, 'check_output')
- def test__sign_assertion_exc(self, check_output_mock,
- write_to_tempfile_mock):
- # If the command fails the command output is logged.
-
- write_to_tempfile_mock.return_value = 'tmp_path'
-
- sample_returncode = 1
- sample_output = self.getUniqueString()
- check_output_mock.side_effect = subprocess.CalledProcessError(
- returncode=sample_returncode, cmd=CONF.saml.xmlsec1_binary,
- output=sample_output)
-
- logger_fixture = self.useFixture(fixtures.LoggerFixture())
- self.assertRaises(exception.SAMLSigningError,
- keystone_idp._sign_assertion,
- self.signed_assertion)
- expected_log = (
- "Error when signing assertion, reason: Command '%s' returned "
- "non-zero exit status %s %s\n" %
- (CONF.saml.xmlsec1_binary, sample_returncode, sample_output))
- self.assertEqual(expected_log, logger_fixture.output)
-
- @mock.patch('oslo_utils.fileutils.write_to_tempfile')
- def test__sign_assertion_fileutils_exc(self, write_to_tempfile_mock):
- exception_msg = 'fake'
- write_to_tempfile_mock.side_effect = Exception(exception_msg)
-
- logger_fixture = self.useFixture(fixtures.LoggerFixture())
- self.assertRaises(exception.SAMLSigningError,
- keystone_idp._sign_assertion,
- self.signed_assertion)
- expected_log = (
- 'Error when signing assertion, reason: %s\n' % exception_msg)
- self.assertEqual(expected_log, logger_fixture.output)
-
-
-class IdPMetadataGenerationTests(test_v3.RestfulTestCase):
- """A class for testing Identity Provider Metadata generation."""
-
- METADATA_URL = '/OS-FEDERATION/saml2/metadata'
-
- def setUp(self):
- super(IdPMetadataGenerationTests, self).setUp()
- self.generator = keystone_idp.MetadataGenerator()
-
- def config_overrides(self):
- super(IdPMetadataGenerationTests, self).config_overrides()
- self.config_fixture.config(
- group='saml',
- idp_entity_id=federation_fixtures.IDP_ENTITY_ID,
- idp_sso_endpoint=federation_fixtures.IDP_SSO_ENDPOINT,
- idp_organization_name=federation_fixtures.IDP_ORGANIZATION_NAME,
- idp_organization_display_name=(
- federation_fixtures.IDP_ORGANIZATION_DISPLAY_NAME),
- idp_organization_url=federation_fixtures.IDP_ORGANIZATION_URL,
- idp_contact_company=federation_fixtures.IDP_CONTACT_COMPANY,
- idp_contact_name=federation_fixtures.IDP_CONTACT_GIVEN_NAME,
- idp_contact_surname=federation_fixtures.IDP_CONTACT_SURNAME,
- idp_contact_email=federation_fixtures.IDP_CONTACT_EMAIL,
- idp_contact_telephone=(
- federation_fixtures.IDP_CONTACT_TELEPHONE_NUMBER),
- idp_contact_type=federation_fixtures.IDP_CONTACT_TYPE)
-
- def test_check_entity_id(self):
- metadata = self.generator.generate_metadata()
- self.assertEqual(federation_fixtures.IDP_ENTITY_ID, metadata.entity_id)
-
- def test_metadata_validity(self):
- """Call md.EntityDescriptor method that does internal verification."""
- self.generator.generate_metadata().verify()
-
- def test_serialize_metadata_object(self):
- """Check whether serialization doesn't raise any exceptions."""
- self.generator.generate_metadata().to_string()
- # TODO(marek-denis): Check values here
-
- def test_check_idp_sso(self):
- metadata = self.generator.generate_metadata()
- idpsso_descriptor = metadata.idpsso_descriptor
- self.assertIsNotNone(metadata.idpsso_descriptor)
- self.assertEqual(federation_fixtures.IDP_SSO_ENDPOINT,
- idpsso_descriptor.single_sign_on_service.location)
-
- self.assertIsNotNone(idpsso_descriptor.organization)
- organization = idpsso_descriptor.organization
- self.assertEqual(federation_fixtures.IDP_ORGANIZATION_DISPLAY_NAME,
- organization.organization_display_name.text)
- self.assertEqual(federation_fixtures.IDP_ORGANIZATION_NAME,
- organization.organization_name.text)
- self.assertEqual(federation_fixtures.IDP_ORGANIZATION_URL,
- organization.organization_url.text)
-
- self.assertIsNotNone(idpsso_descriptor.contact_person)
- contact_person = idpsso_descriptor.contact_person
-
- self.assertEqual(federation_fixtures.IDP_CONTACT_GIVEN_NAME,
- contact_person.given_name.text)
- self.assertEqual(federation_fixtures.IDP_CONTACT_SURNAME,
- contact_person.sur_name.text)
- self.assertEqual(federation_fixtures.IDP_CONTACT_EMAIL,
- contact_person.email_address.text)
- self.assertEqual(federation_fixtures.IDP_CONTACT_TELEPHONE_NUMBER,
- contact_person.telephone_number.text)
- self.assertEqual(federation_fixtures.IDP_CONTACT_TYPE,
- contact_person.contact_type)
-
- def test_metadata_no_organization(self):
- self.config_fixture.config(
- group='saml',
- idp_organization_display_name=None,
- idp_organization_url=None,
- idp_organization_name=None)
- metadata = self.generator.generate_metadata()
- idpsso_descriptor = metadata.idpsso_descriptor
- self.assertIsNotNone(metadata.idpsso_descriptor)
- self.assertIsNone(idpsso_descriptor.organization)
- self.assertIsNotNone(idpsso_descriptor.contact_person)
-
- def test_metadata_no_contact_person(self):
- self.config_fixture.config(
- group='saml',
- idp_contact_name=None,
- idp_contact_surname=None,
- idp_contact_email=None,
- idp_contact_telephone=None)
- metadata = self.generator.generate_metadata()
- idpsso_descriptor = metadata.idpsso_descriptor
- self.assertIsNotNone(metadata.idpsso_descriptor)
- self.assertIsNotNone(idpsso_descriptor.organization)
- self.assertEqual([], idpsso_descriptor.contact_person)
-
- def test_metadata_invalid_contact_type(self):
- self.config_fixture.config(
- group='saml',
- idp_contact_type="invalid")
- self.assertRaises(exception.ValidationError,
- self.generator.generate_metadata)
-
- def test_metadata_invalid_idp_sso_endpoint(self):
- self.config_fixture.config(
- group='saml',
- idp_sso_endpoint=None)
- self.assertRaises(exception.ValidationError,
- self.generator.generate_metadata)
-
- def test_metadata_invalid_idp_entity_id(self):
- self.config_fixture.config(
- group='saml',
- idp_entity_id=None)
- self.assertRaises(exception.ValidationError,
- self.generator.generate_metadata)
-
- def test_get_metadata_with_no_metadata_file_configured(self):
- self.get(self.METADATA_URL,
- expected_status=http_client.INTERNAL_SERVER_ERROR)
-
- def test_get_metadata(self):
- self.config_fixture.config(
- group='saml', idp_metadata_path=XMLDIR + '/idp_saml2_metadata.xml')
- r = self.get(self.METADATA_URL, response_content_type='text/xml')
- self.assertEqual('text/xml', r.headers.get('Content-Type'))
-
- reference_file = _load_xml('idp_saml2_metadata.xml')
- self.assertEqual(reference_file, r.result)
-
-
-class ServiceProviderTests(test_v3.RestfulTestCase):
- """A test class for Service Providers."""
-
- MEMBER_NAME = 'service_provider'
- COLLECTION_NAME = 'service_providers'
- SERVICE_PROVIDER_ID = 'ACME'
- SP_KEYS = ['auth_url', 'id', 'enabled', 'description',
- 'relay_state_prefix', 'sp_url']
-
- def setUp(self):
- super(ServiceProviderTests, self).setUp()
- # Add a Service Provider
- url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- self.SP_REF = self.sp_ref()
- self.SERVICE_PROVIDER = self.put(
- url, body={'service_provider': self.SP_REF},
- expected_status=http_client.CREATED).result
-
- def sp_ref(self):
- ref = {
- 'auth_url': 'https://' + uuid.uuid4().hex + '.com',
- 'enabled': True,
- 'description': uuid.uuid4().hex,
- 'sp_url': 'https://' + uuid.uuid4().hex + '.com',
- 'relay_state_prefix': CONF.saml.relay_state_prefix
- }
- return ref
-
- def base_url(self, suffix=None):
- if suffix is not None:
- return '/OS-FEDERATION/service_providers/' + str(suffix)
- return '/OS-FEDERATION/service_providers'
-
- def _create_default_sp(self, body=None):
- """Create default Service Provider."""
- url = self.base_url(suffix=uuid.uuid4().hex)
- if body is None:
- body = self.sp_ref()
- resp = self.put(url, body={'service_provider': body},
- expected_status=http_client.CREATED)
- return resp
-
- def test_get_service_provider(self):
- url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- resp = self.get(url)
- self.assertValidEntity(resp.result['service_provider'],
- keys_to_check=self.SP_KEYS)
-
- def test_get_service_provider_fail(self):
- url = self.base_url(suffix=uuid.uuid4().hex)
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_create_service_provider(self):
- url = self.base_url(suffix=uuid.uuid4().hex)
- sp = self.sp_ref()
- resp = self.put(url, body={'service_provider': sp},
- expected_status=http_client.CREATED)
- self.assertValidEntity(resp.result['service_provider'],
- keys_to_check=self.SP_KEYS)
-
- def test_create_sp_relay_state_default(self):
- """Create an SP without relay state, should default to `ss:mem`."""
- url = self.base_url(suffix=uuid.uuid4().hex)
- sp = self.sp_ref()
- del sp['relay_state_prefix']
- resp = self.put(url, body={'service_provider': sp},
- expected_status=http_client.CREATED)
- sp_result = resp.result['service_provider']
- self.assertEqual(CONF.saml.relay_state_prefix,
- sp_result['relay_state_prefix'])
-
- def test_create_sp_relay_state_non_default(self):
- """Create an SP with custom relay state."""
- url = self.base_url(suffix=uuid.uuid4().hex)
- sp = self.sp_ref()
- non_default_prefix = uuid.uuid4().hex
- sp['relay_state_prefix'] = non_default_prefix
- resp = self.put(url, body={'service_provider': sp},
- expected_status=http_client.CREATED)
- sp_result = resp.result['service_provider']
- self.assertEqual(non_default_prefix,
- sp_result['relay_state_prefix'])
-
- def test_create_service_provider_fail(self):
- """Try adding SP object with unallowed attribute."""
- url = self.base_url(suffix=uuid.uuid4().hex)
- sp = self.sp_ref()
- sp[uuid.uuid4().hex] = uuid.uuid4().hex
- self.put(url, body={'service_provider': sp},
- expected_status=http_client.BAD_REQUEST)
-
- def test_list_service_providers(self):
- """Test listing of service provider objects.
-
- Add two new service providers. List all available service providers.
- Expect to get list of three service providers (one created by setUp())
- Test if attributes match.
-
- """
- ref_service_providers = {
- uuid.uuid4().hex: self.sp_ref(),
- uuid.uuid4().hex: self.sp_ref(),
- }
- for id, sp in ref_service_providers.items():
- url = self.base_url(suffix=id)
- self.put(url, body={'service_provider': sp},
- expected_status=http_client.CREATED)
-
- # Insert ids into service provider object, we will compare it with
- # responses from server and those include 'id' attribute.
-
- ref_service_providers[self.SERVICE_PROVIDER_ID] = self.SP_REF
- for id, sp in ref_service_providers.items():
- sp['id'] = id
-
- url = self.base_url()
- resp = self.get(url)
- service_providers = resp.result
- for service_provider in service_providers['service_providers']:
- id = service_provider['id']
- self.assertValidEntity(
- service_provider, ref=ref_service_providers[id],
- keys_to_check=self.SP_KEYS)
-
- def test_update_service_provider(self):
- """Update existing service provider.
-
- Update default existing service provider and make sure it has been
- properly changed.
-
- """
- new_sp_ref = self.sp_ref()
- url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- resp = self.patch(url, body={'service_provider': new_sp_ref})
- patch_result = resp.result
- new_sp_ref['id'] = self.SERVICE_PROVIDER_ID
- self.assertValidEntity(patch_result['service_provider'],
- ref=new_sp_ref,
- keys_to_check=self.SP_KEYS)
-
- resp = self.get(url)
- get_result = resp.result
-
- self.assertDictEqual(patch_result['service_provider'],
- get_result['service_provider'])
-
- def test_update_service_provider_immutable_parameters(self):
- """Update immutable attributes in service provider.
-
- In this particular case the test will try to change ``id`` attribute.
- The server should return an HTTP 403 Forbidden error code.
-
- """
- new_sp_ref = {'id': uuid.uuid4().hex}
- url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- self.patch(url, body={'service_provider': new_sp_ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_update_service_provider_unknown_parameter(self):
- new_sp_ref = self.sp_ref()
- new_sp_ref[uuid.uuid4().hex] = uuid.uuid4().hex
- url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- self.patch(url, body={'service_provider': new_sp_ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_update_service_provider_returns_not_found(self):
- new_sp_ref = self.sp_ref()
- new_sp_ref['description'] = uuid.uuid4().hex
- url = self.base_url(suffix=uuid.uuid4().hex)
- self.patch(url, body={'service_provider': new_sp_ref},
- expected_status=http_client.NOT_FOUND)
-
- def test_update_sp_relay_state(self):
- """Update an SP with custom relay state."""
- new_sp_ref = self.sp_ref()
- non_default_prefix = uuid.uuid4().hex
- new_sp_ref['relay_state_prefix'] = non_default_prefix
- url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- resp = self.patch(url, body={'service_provider': new_sp_ref})
- sp_result = resp.result['service_provider']
- self.assertEqual(non_default_prefix,
- sp_result['relay_state_prefix'])
-
- def test_delete_service_provider(self):
- url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- self.delete(url)
-
- def test_delete_service_provider_returns_not_found(self):
- url = self.base_url(suffix=uuid.uuid4().hex)
- self.delete(url, expected_status=http_client.NOT_FOUND)
-
- def test_filter_list_sp_by_id(self):
- def get_id(resp):
- sp = resp.result.get('service_provider')
- return sp.get('id')
-
- sp1_id = get_id(self._create_default_sp())
- sp2_id = get_id(self._create_default_sp())
-
- # list the SP, should get SPs.
- url = self.base_url()
- resp = self.get(url)
- sps = resp.result.get('service_providers')
- entities_ids = [e['id'] for e in sps]
- self.assertIn(sp1_id, entities_ids)
- self.assertIn(sp2_id, entities_ids)
-
- # filter the SP by 'id'. Only SP1 should appear.
- url = self.base_url() + '?id=' + sp1_id
- resp = self.get(url)
- sps = resp.result.get('service_providers')
- entities_ids = [e['id'] for e in sps]
- self.assertIn(sp1_id, entities_ids)
- self.assertNotIn(sp2_id, entities_ids)
-
- def test_filter_list_sp_by_enabled(self):
- def get_id(resp):
- sp = resp.result.get('service_provider')
- return sp.get('id')
-
- sp1_id = get_id(self._create_default_sp())
- sp2_ref = self.sp_ref()
- sp2_ref['enabled'] = False
- sp2_id = get_id(self._create_default_sp(body=sp2_ref))
-
- # list the SP, should get two SPs.
- url = self.base_url()
- resp = self.get(url)
- sps = resp.result.get('service_providers')
- entities_ids = [e['id'] for e in sps]
- self.assertIn(sp1_id, entities_ids)
- self.assertIn(sp2_id, entities_ids)
-
- # filter the SP by 'enabled'. Only SP1 should appear.
- url = self.base_url() + '?enabled=True'
- resp = self.get(url)
- sps = resp.result.get('service_providers')
- entities_ids = [e['id'] for e in sps]
- self.assertIn(sp1_id, entities_ids)
- self.assertNotIn(sp2_id, entities_ids)
-
-
-class WebSSOTests(FederatedTokenTests):
- """A class for testing Web SSO."""
-
- SSO_URL = '/auth/OS-FEDERATION/websso/'
- SSO_TEMPLATE_NAME = 'sso_callback_template.html'
- SSO_TEMPLATE_PATH = os.path.join(core.dirs.etc(), SSO_TEMPLATE_NAME)
- TRUSTED_DASHBOARD = 'http://horizon.com'
- ORIGIN = urllib.parse.quote_plus(TRUSTED_DASHBOARD)
- PROTOCOL_REMOTE_ID_ATTR = uuid.uuid4().hex
-
- def setUp(self):
- super(WebSSOTests, self).setUp()
- self.api = federation_controllers.Auth()
-
- def config_overrides(self):
- super(WebSSOTests, self).config_overrides()
- self.config_fixture.config(
- group='federation',
- trusted_dashboard=[self.TRUSTED_DASHBOARD],
- sso_callback_template=self.SSO_TEMPLATE_PATH,
- remote_id_attribute=self.REMOTE_ID_ATTR)
-
- def test_render_callback_template(self):
- token_id = uuid.uuid4().hex
- resp = self.api.render_html_response(self.TRUSTED_DASHBOARD, token_id)
- self.assertIn(token_id, resp.body)
- self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
-
- def test_federated_sso_auth(self):
- environment = {self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}
- context = {'environment': environment}
- query_string = {'origin': self.ORIGIN}
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
- resp = self.api.federated_sso_auth(context, self.PROTOCOL)
- self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
-
- def test_get_sso_origin_host_case_insensitive(self):
- # test lowercase hostname in trusted_dashboard
- context = {
- 'query_string': {
- 'origin': "http://horizon.com",
- },
- }
- host = self.api._get_sso_origin_host(context)
- self.assertEqual("http://horizon.com", host)
- # test uppercase hostname in trusted_dashboard
- self.config_fixture.config(group='federation',
- trusted_dashboard=['http://Horizon.com'])
- host = self.api._get_sso_origin_host(context)
- self.assertEqual("http://horizon.com", host)
-
- def test_federated_sso_auth_with_protocol_specific_remote_id(self):
- self.config_fixture.config(
- group=self.PROTOCOL,
- remote_id_attribute=self.PROTOCOL_REMOTE_ID_ATTR)
-
- environment = {self.PROTOCOL_REMOTE_ID_ATTR: self.REMOTE_IDS[0]}
- context = {'environment': environment}
- query_string = {'origin': self.ORIGIN}
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
- resp = self.api.federated_sso_auth(context, self.PROTOCOL)
- self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
-
- def test_federated_sso_auth_bad_remote_id(self):
- environment = {self.REMOTE_ID_ATTR: self.IDP}
- context = {'environment': environment}
- query_string = {'origin': self.ORIGIN}
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
- self.assertRaises(exception.IdentityProviderNotFound,
- self.api.federated_sso_auth,
- context, self.PROTOCOL)
-
- def test_federated_sso_missing_query(self):
- environment = {self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}
- context = {'environment': environment}
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION')
- self.assertRaises(exception.ValidationError,
- self.api.federated_sso_auth,
- context, self.PROTOCOL)
-
- def test_federated_sso_missing_query_bad_remote_id(self):
- environment = {self.REMOTE_ID_ATTR: self.IDP}
- context = {'environment': environment}
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION')
- self.assertRaises(exception.ValidationError,
- self.api.federated_sso_auth,
- context, self.PROTOCOL)
-
- def test_federated_sso_untrusted_dashboard(self):
- environment = {self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}
- context = {'environment': environment}
- query_string = {'origin': uuid.uuid4().hex}
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
- self.assertRaises(exception.Unauthorized,
- self.api.federated_sso_auth,
- context, self.PROTOCOL)
-
- def test_federated_sso_untrusted_dashboard_bad_remote_id(self):
- environment = {self.REMOTE_ID_ATTR: self.IDP}
- context = {'environment': environment}
- query_string = {'origin': uuid.uuid4().hex}
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
- self.assertRaises(exception.Unauthorized,
- self.api.federated_sso_auth,
- context, self.PROTOCOL)
-
- def test_federated_sso_missing_remote_id(self):
- context = {'environment': {}}
- query_string = {'origin': self.ORIGIN}
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
- self.assertRaises(exception.Unauthorized,
- self.api.federated_sso_auth,
- context, self.PROTOCOL)
-
- def test_identity_provider_specific_federated_authentication(self):
- environment = {self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}
- context = {'environment': environment}
- query_string = {'origin': self.ORIGIN}
- self._inject_assertion(context, 'EMPLOYEE_ASSERTION', query_string)
- resp = self.api.federated_idp_specific_sso_auth(context,
- self.idp['id'],
- self.PROTOCOL)
- self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
-
-
-class K2KServiceCatalogTests(test_v3.RestfulTestCase):
- SP1 = 'SP1'
- SP2 = 'SP2'
- SP3 = 'SP3'
-
- def setUp(self):
- super(K2KServiceCatalogTests, self).setUp()
-
- sp = self.sp_ref()
- self.federation_api.create_sp(self.SP1, sp)
- self.sp_alpha = {self.SP1: sp}
-
- sp = self.sp_ref()
- self.federation_api.create_sp(self.SP2, sp)
- self.sp_beta = {self.SP2: sp}
-
- sp = self.sp_ref()
- self.federation_api.create_sp(self.SP3, sp)
- self.sp_gamma = {self.SP3: sp}
-
- self.token_v3_helper = token_common.V3TokenDataHelper()
-
- def sp_response(self, id, ref):
- ref.pop('enabled')
- ref.pop('description')
- ref.pop('relay_state_prefix')
- ref['id'] = id
- return ref
-
- def sp_ref(self):
- ref = {
- 'auth_url': uuid.uuid4().hex,
- 'enabled': True,
- 'description': uuid.uuid4().hex,
- 'sp_url': uuid.uuid4().hex,
- 'relay_state_prefix': CONF.saml.relay_state_prefix,
- }
- return ref
-
- def _validate_service_providers(self, token, ref):
- token_data = token['token']
- self.assertIn('service_providers', token_data)
- self.assertIsNotNone(token_data['service_providers'])
- service_providers = token_data.get('service_providers')
-
- self.assertEqual(len(ref), len(service_providers))
- for entity in service_providers:
- id = entity.get('id')
- ref_entity = self.sp_response(id, ref.get(id))
- self.assertDictEqual(entity, ref_entity)
-
- def test_service_providers_in_token(self):
- """Check if service providers are listed in service catalog."""
- token = self.token_v3_helper.get_token_data(self.user_id, ['password'])
- ref = {}
- for r in (self.sp_alpha, self.sp_beta, self.sp_gamma):
- ref.update(r)
- self._validate_service_providers(token, ref)
-
- def test_service_provides_in_token_disabled_sp(self):
- """Test behaviour with disabled service providers.
-
- Disabled service providers should not be listed in the service
- catalog.
-
- """
- # disable service provider ALPHA
- sp_ref = {'enabled': False}
- self.federation_api.update_sp(self.SP1, sp_ref)
-
- token = self.token_v3_helper.get_token_data(self.user_id, ['password'])
- ref = {}
- for r in (self.sp_beta, self.sp_gamma):
- ref.update(r)
- self._validate_service_providers(token, ref)
-
- def test_no_service_providers_in_token(self):
- """Test service catalog with disabled service providers.
-
- There should be no entry ``service_providers`` in the catalog.
- Test passes providing no attribute was raised.
-
- """
- sp_ref = {'enabled': False}
- for sp in (self.SP1, self.SP2, self.SP3):
- self.federation_api.update_sp(sp, sp_ref)
-
- token = self.token_v3_helper.get_token_data(self.user_id, ['password'])
- self.assertNotIn('service_providers', token['token'],
- message=('Expected Service Catalog not to have '
- 'service_providers'))
diff --git a/keystone-moon/keystone/tests/unit/test_v3_filters.py b/keystone-moon/keystone/tests/unit/test_v3_filters.py
deleted file mode 100644
index 9dc19af5..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_filters.py
+++ /dev/null
@@ -1,435 +0,0 @@
-# Copyright 2012 OpenStack LLC
-# Copyright 2013 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_config import cfg
-from oslo_serialization import jsonutils
-from six.moves import range
-
-from keystone.tests import unit
-from keystone.tests.unit import filtering
-from keystone.tests.unit import ksfixtures
-from keystone.tests.unit.ksfixtures import temporaryfile
-from keystone.tests.unit import test_v3
-
-
-CONF = cfg.CONF
-
-
-class IdentityTestFilteredCase(filtering.FilterTests,
- test_v3.RestfulTestCase):
- """Test filter enforcement on the v3 Identity API."""
-
- def _policy_fixture(self):
- return ksfixtures.Policy(self.tmpfilename, self.config_fixture)
-
- def setUp(self):
- """Setup for Identity Filter Test Cases."""
- self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
- self.tmpfilename = self.tempfile.file_name
- super(IdentityTestFilteredCase, self).setUp()
-
- def load_sample_data(self):
- """Create sample data for these tests.
-
- As well as the usual housekeeping, create a set of domains,
- users, roles and projects for the subsequent tests:
-
- - Three domains: A,B & C. C is disabled.
- - DomainA has user1, DomainB has user2 and user3
- - DomainA has group1 and group2, DomainB has group3
- - User1 has a role on DomainA
-
- Remember that there will also be a fourth domain in existence,
- the default domain.
-
- """
- # Start by creating a few domains
- self._populate_default_domain()
- self.domainA = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.domainB = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainB['id'], self.domainB)
- self.domainC = unit.new_domain_ref()
- self.domainC['enabled'] = False
- self.resource_api.create_domain(self.domainC['id'], self.domainC)
-
- # Now create some users, one in domainA and two of them in domainB
- self.user1 = unit.create_user(self.identity_api,
- domain_id=self.domainA['id'])
- self.user2 = unit.create_user(self.identity_api,
- domain_id=self.domainB['id'])
- self.user3 = unit.create_user(self.identity_api,
- domain_id=self.domainB['id'])
-
- self.role = unit.new_role_ref()
- self.role_api.create_role(self.role['id'], self.role)
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user1['id'],
- domain_id=self.domainA['id'])
-
- # A default auth request we can use - un-scoped user token
- self.auth = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'])
-
- def _get_id_list_from_ref_list(self, ref_list):
- result_list = []
- for x in ref_list:
- result_list.append(x['id'])
- return result_list
-
- def _set_policy(self, new_policy):
- with open(self.tmpfilename, "w") as policyfile:
- policyfile.write(jsonutils.dumps(new_policy))
-
- def test_list_users_filtered_by_domain(self):
- """GET /users?domain_id=mydomain (filtered)
-
- Test Plan:
-
- - Update policy so api is unprotected
- - Use an un-scoped token to make sure we can filter the
- users by domainB, getting back the 2 users in that domain
-
- """
- self._set_policy({"identity:list_users": []})
- url_by_name = '/users?domain_id=%s' % self.domainB['id']
- r = self.get(url_by_name, auth=self.auth)
- # We should get back two users, those in DomainB
- id_list = self._get_id_list_from_ref_list(r.result.get('users'))
- self.assertIn(self.user2['id'], id_list)
- self.assertIn(self.user3['id'], id_list)
-
- def test_list_filtered_domains(self):
- """GET /domains?enabled=0
-
- Test Plan:
-
- - Update policy for no protection on api
- - Filter by the 'enabled' boolean to get disabled domains, which
- should return just domainC
- - Try the filter using different ways of specifying True/False
- to test that our handling of booleans in filter matching is
- correct
-
- """
- new_policy = {"identity:list_domains": []}
- self._set_policy(new_policy)
- r = self.get('/domains?enabled=0', auth=self.auth)
- id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
- self.assertEqual(1, len(id_list))
- self.assertIn(self.domainC['id'], id_list)
-
- # Try a few ways of specifying 'false'
- for val in ('0', 'false', 'False', 'FALSE', 'n', 'no', 'off'):
- r = self.get('/domains?enabled=%s' % val, auth=self.auth)
- id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
- self.assertEqual([self.domainC['id']], id_list)
-
- # Now try a few ways of specifying 'true' when we should get back
- # the other two domains, plus the default domain
- for val in ('1', 'true', 'True', 'TRUE', 'y', 'yes', 'on'):
- r = self.get('/domains?enabled=%s' % val, auth=self.auth)
- id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
- self.assertEqual(3, len(id_list))
- self.assertIn(self.domainA['id'], id_list)
- self.assertIn(self.domainB['id'], id_list)
- self.assertIn(CONF.identity.default_domain_id, id_list)
-
- r = self.get('/domains?enabled', auth=self.auth)
- id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
- self.assertEqual(3, len(id_list))
- self.assertIn(self.domainA['id'], id_list)
- self.assertIn(self.domainB['id'], id_list)
- self.assertIn(CONF.identity.default_domain_id, id_list)
-
- def test_multiple_filters(self):
- """GET /domains?enabled&name=myname
-
- Test Plan:
-
- - Update policy for no protection on api
- - Filter by the 'enabled' boolean and name - this should
- return a single domain
-
- """
- new_policy = {"identity:list_domains": []}
- self._set_policy(new_policy)
-
- my_url = '/domains?enabled&name=%s' % self.domainA['name']
- r = self.get(my_url, auth=self.auth)
- id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
- self.assertEqual(1, len(id_list))
- self.assertIn(self.domainA['id'], id_list)
- self.assertIs(True, r.result.get('domains')[0]['enabled'])
-
- def test_invalid_filter_is_ignored(self):
- """GET /domains?enableds&name=myname
-
- Test Plan:
-
- - Update policy for no protection on api
- - Filter by name and 'enableds', which does not exist
- - Assert 'enableds' is ignored
-
- """
- new_policy = {"identity:list_domains": []}
- self._set_policy(new_policy)
-
- my_url = '/domains?enableds=0&name=%s' % self.domainA['name']
- r = self.get(my_url, auth=self.auth)
- id_list = self._get_id_list_from_ref_list(r.result.get('domains'))
-
- # domainA is returned and it is enabled, since enableds=0 is not the
- # same as enabled=0
- self.assertEqual(1, len(id_list))
- self.assertIn(self.domainA['id'], id_list)
- self.assertIs(True, r.result.get('domains')[0]['enabled'])
-
- def test_list_users_filtered_by_funny_name(self):
- """GET /users?name=%myname%
-
- Test Plan:
-
- - Update policy so api is unprotected
- - Update a user with name that has filter escape characters
- - Ensure we can filter on it
-
- """
- self._set_policy({"identity:list_users": []})
- user = self.user1
- user['name'] = '%my%name%'
- self.identity_api.update_user(user['id'], user)
-
- url_by_name = '/users?name=%my%name%'
- r = self.get(url_by_name, auth=self.auth)
-
- self.assertEqual(1, len(r.result.get('users')))
- self.assertEqual(user['id'], r.result.get('users')[0]['id'])
-
- def test_inexact_filters(self):
- # Create 20 users
- user_list = self._create_test_data('user', 20)
- # Set up some names that we can filter on
- user = user_list[5]
- user['name'] = 'The'
- self.identity_api.update_user(user['id'], user)
- user = user_list[6]
- user['name'] = 'The Ministry'
- self.identity_api.update_user(user['id'], user)
- user = user_list[7]
- user['name'] = 'The Ministry of'
- self.identity_api.update_user(user['id'], user)
- user = user_list[8]
- user['name'] = 'The Ministry of Silly'
- self.identity_api.update_user(user['id'], user)
- user = user_list[9]
- user['name'] = 'The Ministry of Silly Walks'
- self.identity_api.update_user(user['id'], user)
- # ...and one for useful case insensitivity testing
- user = user_list[10]
- user['name'] = 'the ministry of silly walks OF'
- self.identity_api.update_user(user['id'], user)
-
- self._set_policy({"identity:list_users": []})
-
- url_by_name = '/users?name__contains=Ministry'
- r = self.get(url_by_name, auth=self.auth)
- self.assertEqual(4, len(r.result.get('users')))
- self._match_with_list(r.result.get('users'), user_list,
- list_start=6, list_end=10)
-
- url_by_name = '/users?name__icontains=miNIstry'
- r = self.get(url_by_name, auth=self.auth)
- self.assertEqual(5, len(r.result.get('users')))
- self._match_with_list(r.result.get('users'), user_list,
- list_start=6, list_end=11)
-
- url_by_name = '/users?name__startswith=The'
- r = self.get(url_by_name, auth=self.auth)
- self.assertEqual(5, len(r.result.get('users')))
- self._match_with_list(r.result.get('users'), user_list,
- list_start=5, list_end=10)
-
- url_by_name = '/users?name__istartswith=the'
- r = self.get(url_by_name, auth=self.auth)
- self.assertEqual(6, len(r.result.get('users')))
- self._match_with_list(r.result.get('users'), user_list,
- list_start=5, list_end=11)
-
- url_by_name = '/users?name__endswith=of'
- r = self.get(url_by_name, auth=self.auth)
- self.assertEqual(1, len(r.result.get('users')))
- self.assertEqual(r.result.get('users')[0]['id'], user_list[7]['id'])
-
- url_by_name = '/users?name__iendswith=OF'
- r = self.get(url_by_name, auth=self.auth)
- self.assertEqual(2, len(r.result.get('users')))
- self.assertEqual(user_list[7]['id'], r.result.get('users')[0]['id'])
- self.assertEqual(user_list[10]['id'], r.result.get('users')[1]['id'])
-
- self._delete_test_data('user', user_list)
-
- def test_filter_sql_injection_attack(self):
- """GET /users?name=<injected sql_statement>
-
- Test Plan:
-
- - Attempt to get all entities back by passing a two-term attribute
- - Attempt to piggyback filter to damage DB (e.g. drop table)
-
- """
- self._set_policy({"identity:list_users": [],
- "identity:list_groups": [],
- "identity:create_group": []})
-
- url_by_name = "/users?name=anything' or 'x'='x"
- r = self.get(url_by_name, auth=self.auth)
-
- self.assertEqual(0, len(r.result.get('users')))
-
- # See if we can add a SQL command...use the group table instead of the
- # user table since 'user' is reserved word for SQLAlchemy.
- group = unit.new_group_ref(domain_id=self.domainB['id'])
- group = self.identity_api.create_group(group)
-
- url_by_name = "/users?name=x'; drop table group"
- r = self.get(url_by_name, auth=self.auth)
-
- # Check group table is still there...
- url_by_name = "/groups"
- r = self.get(url_by_name, auth=self.auth)
- self.assertTrue(len(r.result.get('groups')) > 0)
-
-
-class IdentityTestListLimitCase(IdentityTestFilteredCase):
- """Test list limiting enforcement on the v3 Identity API."""
-
- content_type = 'json'
-
- def setUp(self):
- """Setup for Identity Limit Test Cases."""
- super(IdentityTestListLimitCase, self).setUp()
-
- # Create 10 entries for each of the entities we are going to test
- self.ENTITY_TYPES = ['user', 'group', 'project']
- self.entity_lists = {}
- for entity in self.ENTITY_TYPES:
- self.entity_lists[entity] = self._create_test_data(entity, 10)
- # Make sure we clean up when finished
- self.addCleanup(self.clean_up_entity, entity)
-
- self.service_list = []
- self.addCleanup(self.clean_up_service)
- for _ in range(10):
- new_entity = unit.new_service_ref()
- service = self.catalog_api.create_service(new_entity['id'],
- new_entity)
- self.service_list.append(service)
-
- self.policy_list = []
- self.addCleanup(self.clean_up_policy)
- for _ in range(10):
- new_entity = unit.new_policy_ref()
- policy = self.policy_api.create_policy(new_entity['id'],
- new_entity)
- self.policy_list.append(policy)
-
- def clean_up_entity(self, entity):
- """Clean up entity test data from Identity Limit Test Cases."""
- self._delete_test_data(entity, self.entity_lists[entity])
-
- def clean_up_service(self):
- """Clean up service test data from Identity Limit Test Cases."""
- for service in self.service_list:
- self.catalog_api.delete_service(service['id'])
-
- def clean_up_policy(self):
- """Clean up policy test data from Identity Limit Test Cases."""
- for policy in self.policy_list:
- self.policy_api.delete_policy(policy['id'])
-
- def _test_entity_list_limit(self, entity, driver):
- """GET /<entities> (limited)
-
- Test Plan:
-
- - For the specified type of entity:
- - Update policy for no protection on api
- - Add a bunch of entities
- - Set the global list limit to 5, and check that getting all
- - entities only returns 5
- - Set the driver list_limit to 4, and check that now only 4 are
- - returned
-
- """
- if entity == 'policy':
- plural = 'policies'
- else:
- plural = '%ss' % entity
-
- self._set_policy({"identity:list_%s" % plural: []})
- self.config_fixture.config(list_limit=5)
- self.config_fixture.config(group=driver, list_limit=None)
- r = self.get('/%s' % plural, auth=self.auth)
- self.assertEqual(5, len(r.result.get(plural)))
- self.assertIs(r.result.get('truncated'), True)
-
- self.config_fixture.config(group=driver, list_limit=4)
- r = self.get('/%s' % plural, auth=self.auth)
- self.assertEqual(4, len(r.result.get(plural)))
- self.assertIs(r.result.get('truncated'), True)
-
- def test_users_list_limit(self):
- self._test_entity_list_limit('user', 'identity')
-
- def test_groups_list_limit(self):
- self._test_entity_list_limit('group', 'identity')
-
- def test_projects_list_limit(self):
- self._test_entity_list_limit('project', 'resource')
-
- def test_services_list_limit(self):
- self._test_entity_list_limit('service', 'catalog')
-
- def test_non_driver_list_limit(self):
- """Check list can be limited without driver level support.
-
- Policy limiting is not done at the driver level (since it
- really isn't worth doing it there). So use this as a test
- for ensuring the controller level will successfully limit
- in this case.
-
- """
- self._test_entity_list_limit('policy', 'policy')
-
- def test_no_limit(self):
- """Check truncated attribute not set when list not limited."""
- self._set_policy({"identity:list_services": []})
- r = self.get('/services', auth=self.auth)
- self.assertEqual(10, len(r.result.get('services')))
- self.assertIsNone(r.result.get('truncated'))
-
- def test_at_limit(self):
- """Check truncated attribute not set when list at max size."""
- # Test this by overriding the general limit with a higher
- # driver-specific limit (allowing all entities to be returned
- # in the collection), which should result in a non truncated list
- self._set_policy({"identity:list_services": []})
- self.config_fixture.config(list_limit=5)
- self.config_fixture.config(group='catalog', list_limit=10)
- r = self.get('/services', auth=self.auth)
- self.assertEqual(10, len(r.result.get('services')))
- self.assertIsNone(r.result.get('truncated'))
diff --git a/keystone-moon/keystone/tests/unit/test_v3_identity.py b/keystone-moon/keystone/tests/unit/test_v3_identity.py
deleted file mode 100644
index 7d3f6cad..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_identity.py
+++ /dev/null
@@ -1,795 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-import uuid
-
-import fixtures
-import mock
-from oslo_config import cfg
-from six.moves import http_client
-from testtools import matchers
-
-from keystone.common import controller
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-
-
-CONF = cfg.CONF
-
-
-# NOTE(morganfainberg): To be removed when admin_token_auth middleware is
-# removed. This was moved to it's own testcase so it can setup the
-# admin_token_auth pipeline without impacting other tests.
-class IdentityTestCaseStaticAdminToken(test_v3.RestfulTestCase):
- EXTENSION_TO_ADD = 'admin_token_auth'
-
- def config_overrides(self):
- super(IdentityTestCaseStaticAdminToken, self).config_overrides()
- self.config_fixture.config(
- admin_token='ADMIN')
-
- def test_list_users_with_static_admin_token_and_multiple_backends(self):
- # domain-specific operations with the bootstrap ADMIN token is
- # disallowed when domain-specific drivers are enabled
- self.config_fixture.config(group='identity',
- domain_specific_drivers_enabled=True)
- self.get('/users', token=CONF.admin_token,
- expected_status=exception.Unauthorized.code)
-
- def test_create_user_with_admin_token_and_no_domain(self):
- """Call ``POST /users`` with admin token but no domain id.
-
- It should not be possible to use the admin token to create a user
- while not explicitly passing the domain in the request body.
-
- """
- # Passing a valid domain id to new_user_ref() since domain_id is
- # not an optional parameter.
- ref = unit.new_user_ref(domain_id=self.domain_id)
- # Delete the domain id before sending the request.
- del ref['domain_id']
- self.post('/users', body={'user': ref}, token=CONF.admin_token,
- expected_status=http_client.BAD_REQUEST)
-
-
-class IdentityTestCase(test_v3.RestfulTestCase):
- """Test users and groups."""
-
- def setUp(self):
- super(IdentityTestCase, self).setUp()
-
- self.group = unit.new_group_ref(domain_id=self.domain_id)
- self.group = self.identity_api.create_group(self.group)
- self.group_id = self.group['id']
-
- self.credential = unit.new_credential_ref(
- user_id=self.user['id'],
- project_id=self.project_id)
-
- self.credential_api.create_credential(self.credential['id'],
- self.credential)
-
- # user crud tests
-
- def test_create_user(self):
- """Call ``POST /users``."""
- ref = unit.new_user_ref(domain_id=self.domain_id)
- r = self.post(
- '/users',
- body={'user': ref})
- return self.assertValidUserResponse(r, ref)
-
- def test_create_user_without_domain(self):
- """Call ``POST /users`` without specifying domain.
-
- According to the identity-api specification, if you do not
- explicitly specific the domain_id in the entity, it should
- take the domain scope of the token as the domain_id.
-
- """
- # Create a user with a role on the domain so we can get a
- # domain scoped token
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
- user = unit.create_user(self.identity_api, domain_id=domain['id'])
- self.assignment_api.create_grant(
- role_id=self.role_id, user_id=user['id'],
- domain_id=domain['id'])
-
- ref = unit.new_user_ref(domain_id=domain['id'])
- ref_nd = ref.copy()
- ref_nd.pop('domain_id')
- auth = self.build_authentication_request(
- user_id=user['id'],
- password=user['password'],
- domain_id=domain['id'])
- r = self.post('/users', body={'user': ref_nd}, auth=auth)
- self.assertValidUserResponse(r, ref)
-
- # Now try the same thing without a domain token - which should fail
- ref = unit.new_user_ref(domain_id=domain['id'])
- ref_nd = ref.copy()
- ref_nd.pop('domain_id')
- auth = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
-
- # TODO(henry-nash): Due to bug #1283539 we currently automatically
- # use the default domain_id if a domain scoped token is not being
- # used. For now we just check that a deprecation warning has been
- # issued. Change the code below to expect a failure once this bug is
- # fixed.
- with mock.patch(
- 'oslo_log.versionutils.report_deprecated_feature') as mock_dep:
- r = self.post('/users', body={'user': ref_nd}, auth=auth)
- self.assertTrue(mock_dep.called)
-
- ref['domain_id'] = CONF.identity.default_domain_id
- return self.assertValidUserResponse(r, ref)
-
- def test_create_user_with_admin_token_and_domain(self):
- """Call ``POST /users`` with admin token and domain id."""
- ref = unit.new_user_ref(domain_id=self.domain_id)
- self.post('/users', body={'user': ref}, token=self.get_admin_token(),
- expected_status=http_client.CREATED)
-
- def test_user_management_normalized_keys(self):
- """Illustrate the inconsistent handling of hyphens in keys.
-
- To quote Morgan in bug 1526244:
-
- the reason this is converted from "domain-id" to "domain_id" is
- because of how we process/normalize data. The way we have to handle
- specific data types for known columns requires avoiding "-" in the
- actual python code since "-" is not valid for attributes in python
- w/o significant use of "getattr" etc.
-
- In short, historically we handle some things in conversions. The
- use of "extras" has long been a poor design choice that leads to
- odd/strange inconsistent behaviors because of other choices made in
- handling data from within the body. (In many cases we convert from
- "-" to "_" throughout openstack)
-
- Source: https://bugs.launchpad.net/keystone/+bug/1526244/comments/9
-
- """
- # Create two domains to work with.
- domain1 = unit.new_domain_ref()
- self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
-
- # We can successfully create a normal user without any surprises.
- user = unit.new_user_ref(domain_id=domain1['id'])
- r = self.post(
- '/users',
- body={'user': user})
- self.assertValidUserResponse(r, user)
- user['id'] = r.json['user']['id']
-
- # Query strings are not normalized: so we get all users back (like
- # self.user), not just the ones in the specified domain.
- r = self.get(
- '/users?domain-id=%s' % domain1['id'])
- self.assertValidUserListResponse(r, ref=self.user)
- self.assertNotEqual(domain1['id'], self.user['domain_id'])
-
- # When creating a new user, if we move the 'domain_id' into the
- # 'domain-id' attribute, the server will normalize the request
- # attribute, and effectively "move it back" for us.
- user = unit.new_user_ref(domain_id=domain1['id'])
- user['domain-id'] = user.pop('domain_id')
- r = self.post(
- '/users',
- body={'user': user})
- self.assertNotIn('domain-id', r.json['user'])
- self.assertEqual(domain1['id'], r.json['user']['domain_id'])
- # (move this attribute back so we can use assertValidUserResponse)
- user['domain_id'] = user.pop('domain-id')
- self.assertValidUserResponse(r, user)
- user['id'] = r.json['user']['id']
-
- # If we try updating the user's 'domain_id' by specifying a
- # 'domain-id', then it'll be stored into extras rather than normalized,
- # and the user's actual 'domain_id' is not affected.
- r = self.patch(
- '/users/%s' % user['id'],
- body={'user': {'domain-id': domain2['id']}})
- self.assertEqual(domain2['id'], r.json['user']['domain-id'])
- self.assertEqual(user['domain_id'], r.json['user']['domain_id'])
- self.assertNotEqual(domain2['id'], user['domain_id'])
- self.assertValidUserResponse(r, user)
-
- def test_create_user_bad_request(self):
- """Call ``POST /users``."""
- self.post('/users', body={'user': {}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_list_users(self):
- """Call ``GET /users``."""
- resource_url = '/users'
- r = self.get(resource_url)
- self.assertValidUserListResponse(r, ref=self.user,
- resource_url=resource_url)
-
- def test_list_users_with_multiple_backends(self):
- """Call ``GET /users`` when multiple backends is enabled.
-
- In this scenario, the controller requires a domain to be specified
- either as a filter or by using a domain scoped token.
-
- """
- self.config_fixture.config(group='identity',
- domain_specific_drivers_enabled=True)
-
- # Create a new domain with a new project and user
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
-
- project = unit.new_project_ref(domain_id=domain['id'])
- self.resource_api.create_project(project['id'], project)
-
- user = unit.create_user(self.identity_api, domain_id=domain['id'])
-
- # Create both project and domain role grants for the user so we
- # can get both project and domain scoped tokens
- self.assignment_api.create_grant(
- role_id=self.role_id, user_id=user['id'],
- domain_id=domain['id'])
- self.assignment_api.create_grant(
- role_id=self.role_id, user_id=user['id'],
- project_id=project['id'])
-
- dom_auth = self.build_authentication_request(
- user_id=user['id'],
- password=user['password'],
- domain_id=domain['id'])
- project_auth = self.build_authentication_request(
- user_id=user['id'],
- password=user['password'],
- project_id=project['id'])
-
- # First try using a domain scoped token
- resource_url = '/users'
- r = self.get(resource_url, auth=dom_auth)
- self.assertValidUserListResponse(r, ref=user,
- resource_url=resource_url)
-
- # Now try using a project scoped token
- resource_url = '/users'
- r = self.get(resource_url, auth=project_auth)
- self.assertValidUserListResponse(r, ref=user,
- resource_url=resource_url)
-
- # Now try with an explicit filter
- resource_url = ('/users?domain_id=%(domain_id)s' %
- {'domain_id': domain['id']})
- r = self.get(resource_url)
- self.assertValidUserListResponse(r, ref=user,
- resource_url=resource_url)
-
- def test_list_users_no_default_project(self):
- """Call ``GET /users`` making sure no default_project_id."""
- user = unit.new_user_ref(self.domain_id)
- user = self.identity_api.create_user(user)
- resource_url = '/users'
- r = self.get(resource_url)
- self.assertValidUserListResponse(r, ref=user,
- resource_url=resource_url)
-
- def test_get_user(self):
- """Call ``GET /users/{user_id}``."""
- r = self.get('/users/%(user_id)s' % {
- 'user_id': self.user['id']})
- self.assertValidUserResponse(r, self.user)
-
- def test_get_user_with_default_project(self):
- """Call ``GET /users/{user_id}`` making sure of default_project_id."""
- user = unit.new_user_ref(domain_id=self.domain_id,
- project_id=self.project_id)
- user = self.identity_api.create_user(user)
- r = self.get('/users/%(user_id)s' % {'user_id': user['id']})
- self.assertValidUserResponse(r, user)
-
- def test_add_user_to_group(self):
- """Call ``PUT /groups/{group_id}/users/{user_id}``."""
- self.put('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group_id, 'user_id': self.user['id']})
-
- def test_list_groups_for_user(self):
- """Call ``GET /users/{user_id}/groups``."""
- user1 = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
- user2 = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
-
- self.put('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group_id, 'user_id': user1['id']})
-
- # Scenarios below are written to test the default policy configuration
-
- # One should be allowed to list one's own groups
- auth = self.build_authentication_request(
- user_id=user1['id'],
- password=user1['password'])
- resource_url = ('/users/%(user_id)s/groups' %
- {'user_id': user1['id']})
- r = self.get(resource_url, auth=auth)
- self.assertValidGroupListResponse(r, ref=self.group,
- resource_url=resource_url)
-
- # Administrator is allowed to list others' groups
- resource_url = ('/users/%(user_id)s/groups' %
- {'user_id': user1['id']})
- r = self.get(resource_url)
- self.assertValidGroupListResponse(r, ref=self.group,
- resource_url=resource_url)
-
- # Ordinary users should not be allowed to list other's groups
- auth = self.build_authentication_request(
- user_id=user2['id'],
- password=user2['password'])
- r = self.get('/users/%(user_id)s/groups' % {
- 'user_id': user1['id']}, auth=auth,
- expected_status=exception.ForbiddenAction.code)
-
- def test_check_user_in_group(self):
- """Call ``HEAD /groups/{group_id}/users/{user_id}``."""
- self.put('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group_id, 'user_id': self.user['id']})
- self.head('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group_id, 'user_id': self.user['id']})
-
- def test_list_users_in_group(self):
- """Call ``GET /groups/{group_id}/users``."""
- self.put('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group_id, 'user_id': self.user['id']})
- resource_url = ('/groups/%(group_id)s/users' %
- {'group_id': self.group_id})
- r = self.get(resource_url)
- self.assertValidUserListResponse(r, ref=self.user,
- resource_url=resource_url)
- self.assertIn('/groups/%(group_id)s/users' % {
- 'group_id': self.group_id}, r.result['links']['self'])
-
- def test_remove_user_from_group(self):
- """Call ``DELETE /groups/{group_id}/users/{user_id}``."""
- self.put('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group_id, 'user_id': self.user['id']})
- self.delete('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group_id, 'user_id': self.user['id']})
-
- def test_update_user(self):
- """Call ``PATCH /users/{user_id}``."""
- user = unit.new_user_ref(domain_id=self.domain_id)
- del user['id']
- r = self.patch('/users/%(user_id)s' % {
- 'user_id': self.user['id']},
- body={'user': user})
- self.assertValidUserResponse(r, user)
-
- def test_admin_password_reset(self):
- # bootstrap a user as admin
- user_ref = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
-
- # auth as user should work before a password change
- old_password_auth = self.build_authentication_request(
- user_id=user_ref['id'],
- password=user_ref['password'])
- r = self.v3_create_token(old_password_auth)
- old_token = r.headers.get('X-Subject-Token')
-
- # auth as user with a token should work before a password change
- old_token_auth = self.build_authentication_request(token=old_token)
- self.v3_create_token(old_token_auth)
-
- # administrative password reset
- new_password = uuid.uuid4().hex
- self.patch('/users/%s' % user_ref['id'],
- body={'user': {'password': new_password}})
-
- # auth as user with original password should not work after change
- self.v3_create_token(old_password_auth,
- expected_status=http_client.UNAUTHORIZED)
-
- # auth as user with an old token should not work after change
- self.v3_create_token(old_token_auth,
- expected_status=http_client.NOT_FOUND)
-
- # new password should work
- new_password_auth = self.build_authentication_request(
- user_id=user_ref['id'],
- password=new_password)
- self.v3_create_token(new_password_auth)
-
- def test_update_user_domain_id(self):
- """Call ``PATCH /users/{user_id}`` with domain_id."""
- user = unit.new_user_ref(domain_id=self.domain['id'])
- user = self.identity_api.create_user(user)
- user['domain_id'] = CONF.identity.default_domain_id
- r = self.patch('/users/%(user_id)s' % {
- 'user_id': user['id']},
- body={'user': user},
- expected_status=exception.ValidationError.code)
- self.config_fixture.config(domain_id_immutable=False)
- user['domain_id'] = self.domain['id']
- r = self.patch('/users/%(user_id)s' % {
- 'user_id': user['id']},
- body={'user': user})
- self.assertValidUserResponse(r, user)
-
- def test_delete_user(self):
- """Call ``DELETE /users/{user_id}``.
-
- As well as making sure the delete succeeds, we ensure
- that any credentials that reference this user are
- also deleted, while other credentials are unaffected.
- In addition, no tokens should remain valid for this user.
-
- """
- # First check the credential for this user is present
- r = self.credential_api.get_credential(self.credential['id'])
- self.assertDictEqual(self.credential, r)
- # Create a second credential with a different user
-
- user2 = unit.new_user_ref(domain_id=self.domain['id'],
- project_id=self.project['id'])
- user2 = self.identity_api.create_user(user2)
- credential2 = unit.new_credential_ref(user_id=user2['id'],
- project_id=self.project['id'])
- self.credential_api.create_credential(credential2['id'], credential2)
-
- # Create a token for this user which we can check later
- # gets deleted
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project['id'])
- token = self.get_requested_token(auth_data)
- # Confirm token is valid for now
- self.head('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=http_client.OK)
-
- # Now delete the user
- self.delete('/users/%(user_id)s' % {
- 'user_id': self.user['id']})
-
- # Deleting the user should have deleted any credentials
- # that reference this project
- self.assertRaises(exception.CredentialNotFound,
- self.credential_api.get_credential,
- self.credential['id'])
- # And the no tokens we remain valid
- tokens = self.token_provider_api._persistence._list_tokens(
- self.user['id'])
- self.assertEqual(0, len(tokens))
- # But the credential for user2 is unaffected
- r = self.credential_api.get_credential(credential2['id'])
- self.assertDictEqual(credential2, r)
-
- # shadow user tests
- def test_shadow_federated_user(self):
- fed_user = unit.new_federated_user_ref()
- user = (
- self.identity_api.shadow_federated_user(fed_user["idp_id"],
- fed_user["protocol_id"],
- fed_user["unique_id"],
- fed_user["display_name"])
- )
- self.assertIsNotNone(user["id"])
- self.assertEqual(len(user.keys()), 4)
- self.assertIsNotNone(user['id'])
- self.assertIsNotNone(user['name'])
- self.assertIsNone(user['domain_id'])
- self.assertEqual(user['enabled'], True)
-
- def test_shadow_existing_federated_user(self):
- fed_user = unit.new_federated_user_ref()
-
- # introduce the user to keystone for the first time
- shadow_user1 = self.identity_api.shadow_federated_user(
- fed_user["idp_id"],
- fed_user["protocol_id"],
- fed_user["unique_id"],
- fed_user["display_name"])
- self.assertEqual(fed_user['display_name'], shadow_user1['name'])
-
- # shadow the user again, with another name to invalidate the cache
- # internally, this operation causes request to the driver. It should
- # not fail.
- fed_user['display_name'] = uuid.uuid4().hex
- shadow_user2 = self.identity_api.shadow_federated_user(
- fed_user["idp_id"],
- fed_user["protocol_id"],
- fed_user["unique_id"],
- fed_user["display_name"])
- self.assertEqual(fed_user['display_name'], shadow_user2['name'])
- self.assertNotEqual(shadow_user1['name'], shadow_user2['name'])
-
- # The shadowed users still share the same unique ID.
- self.assertEqual(shadow_user1['id'], shadow_user2['id'])
-
- # group crud tests
-
- def test_create_group(self):
- """Call ``POST /groups``."""
- # Create a new group to avoid a duplicate check failure
- ref = unit.new_group_ref(domain_id=self.domain_id)
- r = self.post(
- '/groups',
- body={'group': ref})
- return self.assertValidGroupResponse(r, ref)
-
- def test_create_group_bad_request(self):
- """Call ``POST /groups``."""
- self.post('/groups', body={'group': {}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_list_groups(self):
- """Call ``GET /groups``."""
- resource_url = '/groups'
- r = self.get(resource_url)
- self.assertValidGroupListResponse(r, ref=self.group,
- resource_url=resource_url)
-
- def test_get_group(self):
- """Call ``GET /groups/{group_id}``."""
- r = self.get('/groups/%(group_id)s' % {
- 'group_id': self.group_id})
- self.assertValidGroupResponse(r, self.group)
-
- def test_update_group(self):
- """Call ``PATCH /groups/{group_id}``."""
- group = unit.new_group_ref(domain_id=self.domain_id)
- del group['id']
- r = self.patch('/groups/%(group_id)s' % {
- 'group_id': self.group_id},
- body={'group': group})
- self.assertValidGroupResponse(r, group)
-
- def test_update_group_domain_id(self):
- """Call ``PATCH /groups/{group_id}`` with domain_id."""
- self.group['domain_id'] = CONF.identity.default_domain_id
- r = self.patch('/groups/%(group_id)s' % {
- 'group_id': self.group['id']},
- body={'group': self.group},
- expected_status=exception.ValidationError.code)
- self.config_fixture.config(domain_id_immutable=False)
- self.group['domain_id'] = self.domain['id']
- r = self.patch('/groups/%(group_id)s' % {
- 'group_id': self.group['id']},
- body={'group': self.group})
- self.assertValidGroupResponse(r, self.group)
-
- def test_delete_group(self):
- """Call ``DELETE /groups/{group_id}``."""
- self.delete('/groups/%(group_id)s' % {
- 'group_id': self.group_id})
-
- def test_create_user_password_not_logged(self):
- # When a user is created, the password isn't logged at any level.
-
- log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
-
- ref = unit.new_user_ref(domain_id=self.domain_id)
- self.post(
- '/users',
- body={'user': ref})
-
- self.assertNotIn(ref['password'], log_fix.output)
-
- def test_update_password_not_logged(self):
- # When admin modifies user password, the password isn't logged at any
- # level.
-
- log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
-
- # bootstrap a user as admin
- user_ref = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
-
- self.assertNotIn(user_ref['password'], log_fix.output)
-
- # administrative password reset
- new_password = uuid.uuid4().hex
- self.patch('/users/%s' % user_ref['id'],
- body={'user': {'password': new_password}})
-
- self.assertNotIn(new_password, log_fix.output)
-
-
-class IdentityV3toV2MethodsTestCase(unit.TestCase):
- """Test users V3 to V2 conversion methods."""
-
- def new_user_ref(self, **kwargs):
- """Construct a bare bones user ref.
-
- Omits all optional components.
- """
- ref = unit.new_user_ref(**kwargs)
- # description is already omitted
- del ref['email']
- del ref['enabled']
- del ref['password']
- return ref
-
- def setUp(self):
- super(IdentityV3toV2MethodsTestCase, self).setUp()
- self.load_backends()
- user_id = uuid.uuid4().hex
- project_id = uuid.uuid4().hex
-
- # User with only default_project_id in ref
- self.user1 = self.new_user_ref(
- id=user_id,
- name=user_id,
- project_id=project_id,
- domain_id=CONF.identity.default_domain_id)
- # User without default_project_id or tenantId in ref
- self.user2 = self.new_user_ref(
- id=user_id,
- name=user_id,
- domain_id=CONF.identity.default_domain_id)
- # User with both tenantId and default_project_id in ref
- self.user3 = self.new_user_ref(
- id=user_id,
- name=user_id,
- project_id=project_id,
- tenantId=project_id,
- domain_id=CONF.identity.default_domain_id)
- # User with only tenantId in ref
- self.user4 = self.new_user_ref(
- id=user_id,
- name=user_id,
- tenantId=project_id,
- domain_id=CONF.identity.default_domain_id)
-
- # Expected result if the user is meant to have a tenantId element
- self.expected_user = {'id': user_id,
- 'name': user_id,
- 'username': user_id,
- 'tenantId': project_id}
-
- # Expected result if the user is not meant to have a tenantId element
- self.expected_user_no_tenant_id = {'id': user_id,
- 'name': user_id,
- 'username': user_id}
-
- def test_v3_to_v2_user_method(self):
-
- updated_user1 = controller.V2Controller.v3_to_v2_user(self.user1)
- self.assertIs(self.user1, updated_user1)
- self.assertDictEqual(self.expected_user, self.user1)
- updated_user2 = controller.V2Controller.v3_to_v2_user(self.user2)
- self.assertIs(self.user2, updated_user2)
- self.assertDictEqual(self.expected_user_no_tenant_id, self.user2)
- updated_user3 = controller.V2Controller.v3_to_v2_user(self.user3)
- self.assertIs(self.user3, updated_user3)
- self.assertDictEqual(self.expected_user, self.user3)
- updated_user4 = controller.V2Controller.v3_to_v2_user(self.user4)
- self.assertIs(self.user4, updated_user4)
- self.assertDictEqual(self.expected_user_no_tenant_id, self.user4)
-
- def test_v3_to_v2_user_method_list(self):
- user_list = [self.user1, self.user2, self.user3, self.user4]
- updated_list = controller.V2Controller.v3_to_v2_user(user_list)
-
- self.assertEqual(len(user_list), len(updated_list))
-
- for i, ref in enumerate(updated_list):
- # Order should not change.
- self.assertIs(ref, user_list[i])
-
- self.assertDictEqual(self.expected_user, self.user1)
- self.assertDictEqual(self.expected_user_no_tenant_id, self.user2)
- self.assertDictEqual(self.expected_user, self.user3)
- self.assertDictEqual(self.expected_user_no_tenant_id, self.user4)
-
-
-class UserSelfServiceChangingPasswordsTestCase(test_v3.RestfulTestCase):
-
- def setUp(self):
- super(UserSelfServiceChangingPasswordsTestCase, self).setUp()
- self.user_ref = unit.create_user(self.identity_api,
- domain_id=self.domain['id'])
- self.token = self.get_request_token(self.user_ref['password'],
- http_client.CREATED)
-
- def get_request_token(self, password, expected_status):
- auth_data = self.build_authentication_request(
- user_id=self.user_ref['id'],
- password=password)
- r = self.v3_create_token(auth_data,
- expected_status=expected_status)
- return r.headers.get('X-Subject-Token')
-
- def change_password(self, expected_status, **kwargs):
- """Returns a test response for a change password request."""
- return self.post('/users/%s/password' % self.user_ref['id'],
- body={'user': kwargs},
- token=self.token,
- expected_status=expected_status)
-
- def test_changing_password(self):
- # original password works
- token_id = self.get_request_token(self.user_ref['password'],
- expected_status=http_client.CREATED)
- # original token works
- old_token_auth = self.build_authentication_request(token=token_id)
- self.v3_create_token(old_token_auth)
-
- # change password
- new_password = uuid.uuid4().hex
- self.change_password(password=new_password,
- original_password=self.user_ref['password'],
- expected_status=http_client.NO_CONTENT)
-
- # old password fails
- self.get_request_token(self.user_ref['password'],
- expected_status=http_client.UNAUTHORIZED)
-
- # old token fails
- self.v3_create_token(old_token_auth,
- expected_status=http_client.NOT_FOUND)
-
- # new password works
- self.get_request_token(new_password,
- expected_status=http_client.CREATED)
-
- def test_changing_password_with_missing_original_password_fails(self):
- r = self.change_password(password=uuid.uuid4().hex,
- expected_status=http_client.BAD_REQUEST)
- self.assertThat(r.result['error']['message'],
- matchers.Contains('original_password'))
-
- def test_changing_password_with_missing_password_fails(self):
- r = self.change_password(original_password=self.user_ref['password'],
- expected_status=http_client.BAD_REQUEST)
- self.assertThat(r.result['error']['message'],
- matchers.Contains('password'))
-
- def test_changing_password_with_incorrect_password_fails(self):
- self.change_password(password=uuid.uuid4().hex,
- original_password=uuid.uuid4().hex,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_changing_password_with_disabled_user_fails(self):
- # disable the user account
- self.user_ref['enabled'] = False
- self.patch('/users/%s' % self.user_ref['id'],
- body={'user': self.user_ref})
-
- self.change_password(password=uuid.uuid4().hex,
- original_password=self.user_ref['password'],
- expected_status=http_client.UNAUTHORIZED)
-
- def test_changing_password_not_logged(self):
- # When a user changes their password, the password isn't logged at any
- # level.
-
- log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
-
- # change password
- new_password = uuid.uuid4().hex
- self.change_password(password=new_password,
- original_password=self.user_ref['password'],
- expected_status=http_client.NO_CONTENT)
-
- self.assertNotIn(self.user_ref['password'], log_fix.output)
- self.assertNotIn(new_password, log_fix.output)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_oauth1.py b/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
deleted file mode 100644
index 198dffb8..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
+++ /dev/null
@@ -1,907 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-import mock
-from oslo_log import versionutils
-from oslo_serialization import jsonutils
-from pycadf import cadftaxonomy
-from six.moves import http_client
-from six.moves import urllib
-
-from keystone.contrib.oauth1 import routers
-from keystone import exception
-from keystone import oauth1
-from keystone.oauth1 import controllers
-from keystone.oauth1 import core
-from keystone.tests import unit
-from keystone.tests.unit.common import test_notifications
-from keystone.tests.unit import ksfixtures
-from keystone.tests.unit.ksfixtures import temporaryfile
-from keystone.tests.unit import test_v3
-
-
-class OAuth1ContribTests(test_v3.RestfulTestCase):
-
- @mock.patch.object(versionutils, 'report_deprecated_feature')
- def test_exception_happens(self, mock_deprecator):
- routers.OAuth1Extension(mock.ANY)
- mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
- args, _kwargs = mock_deprecator.call_args
- self.assertIn("Remove oauth1_extension from", args[1])
-
-
-class OAuth1Tests(test_v3.RestfulTestCase):
-
- CONSUMER_URL = '/OS-OAUTH1/consumers'
-
- def setUp(self):
- super(OAuth1Tests, self).setUp()
-
- # Now that the app has been served, we can query CONF values
- self.base_url = 'http://localhost/v3'
- self.controller = controllers.OAuthControllerV3()
-
- def _create_single_consumer(self):
- ref = {'description': uuid.uuid4().hex}
- resp = self.post(
- self.CONSUMER_URL,
- body={'consumer': ref})
- return resp.result['consumer']
-
- def _create_request_token(self, consumer, project_id):
- endpoint = '/OS-OAUTH1/request_token'
- client = oauth1.Client(consumer['key'],
- client_secret=consumer['secret'],
- signature_method=oauth1.SIG_HMAC,
- callback_uri="oob")
- headers = {'requested_project_id': project_id}
- url, headers, body = client.sign(self.base_url + endpoint,
- http_method='POST',
- headers=headers)
- return endpoint, headers
-
- def _create_access_token(self, consumer, token):
- endpoint = '/OS-OAUTH1/access_token'
- client = oauth1.Client(consumer['key'],
- client_secret=consumer['secret'],
- resource_owner_key=token.key,
- resource_owner_secret=token.secret,
- signature_method=oauth1.SIG_HMAC,
- verifier=token.verifier)
- url, headers, body = client.sign(self.base_url + endpoint,
- http_method='POST')
- headers.update({'Content-Type': 'application/json'})
- return endpoint, headers
-
- def _get_oauth_token(self, consumer, token):
- client = oauth1.Client(consumer['key'],
- client_secret=consumer['secret'],
- resource_owner_key=token.key,
- resource_owner_secret=token.secret,
- signature_method=oauth1.SIG_HMAC)
- endpoint = '/auth/tokens'
- url, headers, body = client.sign(self.base_url + endpoint,
- http_method='POST')
- headers.update({'Content-Type': 'application/json'})
- ref = {'auth': {'identity': {'oauth1': {}, 'methods': ['oauth1']}}}
- return endpoint, headers, ref
-
- def _authorize_request_token(self, request_id):
- return '/OS-OAUTH1/authorize/%s' % (request_id)
-
-
-class ConsumerCRUDTests(OAuth1Tests):
-
- def _consumer_create(self, description=None, description_flag=True,
- **kwargs):
- if description_flag:
- ref = {'description': description}
- else:
- ref = {}
- if kwargs:
- ref.update(kwargs)
- resp = self.post(
- self.CONSUMER_URL,
- body={'consumer': ref})
- consumer = resp.result['consumer']
- consumer_id = consumer['id']
- self.assertEqual(description, consumer['description'])
- self.assertIsNotNone(consumer_id)
- self.assertIsNotNone(consumer['secret'])
- return consumer
-
- def test_consumer_create(self):
- description = uuid.uuid4().hex
- self._consumer_create(description=description)
-
- def test_consumer_create_none_desc_1(self):
- self._consumer_create()
-
- def test_consumer_create_none_desc_2(self):
- self._consumer_create(description_flag=False)
-
- def test_consumer_create_normalize_field(self):
- # If create a consumer with a field with : or - in the name,
- # the name is normalized by converting those chars to _.
- field_name = 'some:weird-field'
- field_value = uuid.uuid4().hex
- extra_fields = {field_name: field_value}
- consumer = self._consumer_create(**extra_fields)
- normalized_field_name = 'some_weird_field'
- self.assertEqual(field_value, consumer[normalized_field_name])
-
- def test_consumer_delete(self):
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- resp = self.delete(self.CONSUMER_URL + '/%s' % consumer_id)
- self.assertResponseStatus(resp, http_client.NO_CONTENT)
-
- def test_consumer_get(self):
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- resp = self.get(self.CONSUMER_URL + '/%s' % consumer_id)
- self_url = ['http://localhost/v3', self.CONSUMER_URL,
- '/', consumer_id]
- self_url = ''.join(self_url)
- self.assertEqual(self_url, resp.result['consumer']['links']['self'])
- self.assertEqual(consumer_id, resp.result['consumer']['id'])
-
- def test_consumer_list(self):
- self._consumer_create()
- resp = self.get(self.CONSUMER_URL)
- entities = resp.result['consumers']
- self.assertIsNotNone(entities)
- self_url = ['http://localhost/v3', self.CONSUMER_URL]
- self_url = ''.join(self_url)
- self.assertEqual(self_url, resp.result['links']['self'])
- self.assertValidListLinks(resp.result['links'])
-
- def test_consumer_update(self):
- consumer = self._create_single_consumer()
- original_id = consumer['id']
- original_description = consumer['description']
- update_description = original_description + '_new'
-
- update_ref = {'description': update_description}
- update_resp = self.patch(self.CONSUMER_URL + '/%s' % original_id,
- body={'consumer': update_ref})
- consumer = update_resp.result['consumer']
- self.assertEqual(update_description, consumer['description'])
- self.assertEqual(original_id, consumer['id'])
-
- def test_consumer_update_bad_secret(self):
- consumer = self._create_single_consumer()
- original_id = consumer['id']
- update_ref = copy.deepcopy(consumer)
- update_ref['description'] = uuid.uuid4().hex
- update_ref['secret'] = uuid.uuid4().hex
- self.patch(self.CONSUMER_URL + '/%s' % original_id,
- body={'consumer': update_ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_consumer_update_bad_id(self):
- consumer = self._create_single_consumer()
- original_id = consumer['id']
- original_description = consumer['description']
- update_description = original_description + "_new"
-
- update_ref = copy.deepcopy(consumer)
- update_ref['description'] = update_description
- update_ref['id'] = update_description
- self.patch(self.CONSUMER_URL + '/%s' % original_id,
- body={'consumer': update_ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_consumer_update_normalize_field(self):
- # If update a consumer with a field with : or - in the name,
- # the name is normalized by converting those chars to _.
- field1_name = 'some:weird-field'
- field1_orig_value = uuid.uuid4().hex
-
- extra_fields = {field1_name: field1_orig_value}
- consumer = self._consumer_create(**extra_fields)
- consumer_id = consumer['id']
-
- field1_new_value = uuid.uuid4().hex
-
- field2_name = 'weird:some-field'
- field2_value = uuid.uuid4().hex
-
- update_ref = {field1_name: field1_new_value,
- field2_name: field2_value}
-
- update_resp = self.patch(self.CONSUMER_URL + '/%s' % consumer_id,
- body={'consumer': update_ref})
- consumer = update_resp.result['consumer']
-
- normalized_field1_name = 'some_weird_field'
- self.assertEqual(field1_new_value, consumer[normalized_field1_name])
-
- normalized_field2_name = 'weird_some_field'
- self.assertEqual(field2_value, consumer[normalized_field2_name])
-
- def test_consumer_create_no_description(self):
- resp = self.post(self.CONSUMER_URL, body={'consumer': {}})
- consumer = resp.result['consumer']
- consumer_id = consumer['id']
- self.assertIsNone(consumer['description'])
- self.assertIsNotNone(consumer_id)
- self.assertIsNotNone(consumer['secret'])
-
- def test_consumer_get_bad_id(self):
- self.get(self.CONSUMER_URL + '/%(consumer_id)s'
- % {'consumer_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
-
-class OAuthFlowTests(OAuth1Tests):
-
- def test_oauth_flow(self):
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer_secret = consumer['secret']
- self.consumer = {'key': consumer_id, 'secret': consumer_secret}
- self.assertIsNotNone(self.consumer['secret'])
-
- url, headers = self._create_request_token(self.consumer,
- self.project_id)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- request_key = credentials['oauth_token'][0]
- request_secret = credentials['oauth_token_secret'][0]
- self.request_token = oauth1.Token(request_key, request_secret)
- self.assertIsNotNone(self.request_token.key)
-
- url = self._authorize_request_token(request_key)
- body = {'roles': [{'id': self.role_id}]}
- resp = self.put(url, body=body, expected_status=http_client.OK)
- self.verifier = resp.result['token']['oauth_verifier']
- self.assertTrue(all(i in core.VERIFIER_CHARS for i in self.verifier))
- self.assertEqual(8, len(self.verifier))
-
- self.request_token.set_verifier(self.verifier)
- url, headers = self._create_access_token(self.consumer,
- self.request_token)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- access_key = credentials['oauth_token'][0]
- access_secret = credentials['oauth_token_secret'][0]
- self.access_token = oauth1.Token(access_key, access_secret)
- self.assertIsNotNone(self.access_token.key)
-
- url, headers, body = self._get_oauth_token(self.consumer,
- self.access_token)
- content = self.post(url, headers=headers, body=body)
- self.keystone_token_id = content.headers['X-Subject-Token']
- self.keystone_token = content.result['token']
- self.assertIsNotNone(self.keystone_token_id)
-
-
-class AccessTokenCRUDTests(OAuthFlowTests):
- def test_delete_access_token_dne(self):
- self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
- % {'user': self.user_id,
- 'auth': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_list_no_access_tokens(self):
- resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
- % {'user_id': self.user_id})
- entities = resp.result['access_tokens']
- self.assertEqual([], entities)
- self.assertValidListLinks(resp.result['links'])
-
- def test_get_single_access_token(self):
- self.test_oauth_flow()
- url = '/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s' % {
- 'user_id': self.user_id,
- 'key': self.access_token.key
- }
- resp = self.get(url)
- entity = resp.result['access_token']
- self.assertEqual(self.access_token.key, entity['id'])
- self.assertEqual(self.consumer['key'], entity['consumer_id'])
- self.assertEqual('http://localhost/v3' + url, entity['links']['self'])
-
- def test_get_access_token_dne(self):
- self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s'
- % {'user_id': self.user_id,
- 'key': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_list_all_roles_in_access_token(self):
- self.test_oauth_flow()
- resp = self.get('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles'
- % {'id': self.user_id,
- 'key': self.access_token.key})
- entities = resp.result['roles']
- self.assertTrue(entities)
- self.assertValidListLinks(resp.result['links'])
-
- def test_get_role_in_access_token(self):
- self.test_oauth_flow()
- url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s'
- % {'id': self.user_id, 'key': self.access_token.key,
- 'role': self.role_id})
- resp = self.get(url)
- entity = resp.result['role']
- self.assertEqual(self.role_id, entity['id'])
-
- def test_get_role_in_access_token_dne(self):
- self.test_oauth_flow()
- url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s'
- % {'id': self.user_id, 'key': self.access_token.key,
- 'role': uuid.uuid4().hex})
- self.get(url, expected_status=http_client.NOT_FOUND)
-
- def test_list_and_delete_access_tokens(self):
- self.test_oauth_flow()
- # List access_tokens should be > 0
- resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
- % {'user_id': self.user_id})
- entities = resp.result['access_tokens']
- self.assertTrue(entities)
- self.assertValidListLinks(resp.result['links'])
-
- # Delete access_token
- resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
- % {'user': self.user_id,
- 'auth': self.access_token.key})
- self.assertResponseStatus(resp, http_client.NO_CONTENT)
-
- # List access_token should be 0
- resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
- % {'user_id': self.user_id})
- entities = resp.result['access_tokens']
- self.assertEqual([], entities)
- self.assertValidListLinks(resp.result['links'])
-
-
-class AuthTokenTests(OAuthFlowTests):
-
- def test_keystone_token_is_valid(self):
- self.test_oauth_flow()
- headers = {'X-Subject-Token': self.keystone_token_id,
- 'X-Auth-Token': self.keystone_token_id}
- r = self.get('/auth/tokens', headers=headers)
- self.assertValidTokenResponse(r, self.user)
-
- # now verify the oauth section
- oauth_section = r.result['token']['OS-OAUTH1']
- self.assertEqual(self.access_token.key,
- oauth_section['access_token_id'])
- self.assertEqual(self.consumer['key'], oauth_section['consumer_id'])
-
- # verify the roles section
- roles_list = r.result['token']['roles']
- # we can just verify the 0th role since we are only assigning one role
- self.assertEqual(self.role_id, roles_list[0]['id'])
-
- # verify that the token can perform delegated tasks
- ref = unit.new_user_ref(domain_id=self.domain_id)
- r = self.admin_request(path='/v3/users', headers=headers,
- method='POST', body={'user': ref})
- self.assertValidUserResponse(r, ref)
-
- def test_delete_access_token_also_revokes_token(self):
- self.test_oauth_flow()
-
- # Delete access token
- resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
- % {'user': self.user_id,
- 'auth': self.access_token.key})
- self.assertResponseStatus(resp, http_client.NO_CONTENT)
-
- # Check Keystone Token no longer exists
- headers = {'X-Subject-Token': self.keystone_token_id,
- 'X-Auth-Token': self.keystone_token_id}
- self.get('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND)
-
- def test_deleting_consumer_also_deletes_tokens(self):
- self.test_oauth_flow()
-
- # Delete consumer
- consumer_id = self.consumer['key']
- resp = self.delete('/OS-OAUTH1/consumers/%(consumer_id)s'
- % {'consumer_id': consumer_id})
- self.assertResponseStatus(resp, http_client.NO_CONTENT)
-
- # List access_token should be 0
- resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
- % {'user_id': self.user_id})
- entities = resp.result['access_tokens']
- self.assertEqual([], entities)
-
- # Check Keystone Token no longer exists
- headers = {'X-Subject-Token': self.keystone_token_id,
- 'X-Auth-Token': self.keystone_token_id}
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND)
-
- def test_change_user_password_also_deletes_tokens(self):
- self.test_oauth_flow()
-
- # delegated keystone token exists
- headers = {'X-Subject-Token': self.keystone_token_id,
- 'X-Auth-Token': self.keystone_token_id}
- r = self.get('/auth/tokens', headers=headers)
- self.assertValidTokenResponse(r, self.user)
-
- user = {'password': uuid.uuid4().hex}
- r = self.patch('/users/%(user_id)s' % {
- 'user_id': self.user['id']},
- body={'user': user})
-
- headers = {'X-Subject-Token': self.keystone_token_id,
- 'X-Auth-Token': self.keystone_token_id}
- self.admin_request(path='/auth/tokens', headers=headers,
- method='GET', expected_status=http_client.NOT_FOUND)
-
- def test_deleting_project_also_invalidates_tokens(self):
- self.test_oauth_flow()
-
- # delegated keystone token exists
- headers = {'X-Subject-Token': self.keystone_token_id,
- 'X-Auth-Token': self.keystone_token_id}
- r = self.get('/auth/tokens', headers=headers)
- self.assertValidTokenResponse(r, self.user)
-
- r = self.delete('/projects/%(project_id)s' % {
- 'project_id': self.project_id})
-
- headers = {'X-Subject-Token': self.keystone_token_id,
- 'X-Auth-Token': self.keystone_token_id}
- self.admin_request(path='/auth/tokens', headers=headers,
- method='GET', expected_status=http_client.NOT_FOUND)
-
- def test_token_chaining_is_not_allowed(self):
- self.test_oauth_flow()
-
- # attempt to re-authenticate (token chain) with the given token
- path = '/v3/auth/tokens/'
- auth_data = self.build_authentication_request(
- token=self.keystone_token_id)
-
- self.admin_request(
- path=path,
- body=auth_data,
- token=self.keystone_token_id,
- method='POST',
- expected_status=http_client.FORBIDDEN)
-
- def test_delete_keystone_tokens_by_consumer_id(self):
- self.test_oauth_flow()
- self.token_provider_api._persistence.get_token(self.keystone_token_id)
- self.token_provider_api._persistence.delete_tokens(
- self.user_id,
- consumer_id=self.consumer['key'])
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._persistence.get_token,
- self.keystone_token_id)
-
- def _create_trust_get_token(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.user_id,
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- del ref['id']
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- trust_id=trust['id'])
-
- return self.get_requested_token(auth_data)
-
- def _approve_request_token_url(self):
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer_secret = consumer['secret']
- self.consumer = {'key': consumer_id, 'secret': consumer_secret}
- self.assertIsNotNone(self.consumer['secret'])
-
- url, headers = self._create_request_token(self.consumer,
- self.project_id)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- request_key = credentials['oauth_token'][0]
- request_secret = credentials['oauth_token_secret'][0]
- self.request_token = oauth1.Token(request_key, request_secret)
- self.assertIsNotNone(self.request_token.key)
-
- url = self._authorize_request_token(request_key)
-
- return url
-
- def test_oauth_token_cannot_create_new_trust(self):
- self.test_oauth_flow()
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.user_id,
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- del ref['id']
-
- self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- token=self.keystone_token_id,
- expected_status=http_client.FORBIDDEN)
-
- def test_oauth_token_cannot_authorize_request_token(self):
- self.test_oauth_flow()
- url = self._approve_request_token_url()
- body = {'roles': [{'id': self.role_id}]}
- self.put(url, body=body, token=self.keystone_token_id,
- expected_status=http_client.FORBIDDEN)
-
- def test_oauth_token_cannot_list_request_tokens(self):
- self._set_policy({"identity:list_access_tokens": [],
- "identity:create_consumer": [],
- "identity:authorize_request_token": []})
- self.test_oauth_flow()
- url = '/users/%s/OS-OAUTH1/access_tokens' % self.user_id
- self.get(url, token=self.keystone_token_id,
- expected_status=http_client.FORBIDDEN)
-
- def _set_policy(self, new_policy):
- self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
- self.tmpfilename = self.tempfile.file_name
- self.config_fixture.config(group='oslo_policy',
- policy_file=self.tmpfilename)
- with open(self.tmpfilename, "w") as policyfile:
- policyfile.write(jsonutils.dumps(new_policy))
-
- def test_trust_token_cannot_authorize_request_token(self):
- trust_token = self._create_trust_get_token()
- url = self._approve_request_token_url()
- body = {'roles': [{'id': self.role_id}]}
- self.put(url, body=body, token=trust_token,
- expected_status=http_client.FORBIDDEN)
-
- def test_trust_token_cannot_list_request_tokens(self):
- self._set_policy({"identity:list_access_tokens": [],
- "identity:create_trust": []})
- trust_token = self._create_trust_get_token()
- url = '/users/%s/OS-OAUTH1/access_tokens' % self.user_id
- self.get(url, token=trust_token,
- expected_status=http_client.FORBIDDEN)
-
-
-class FernetAuthTokenTests(AuthTokenTests):
-
- def config_overrides(self):
- super(FernetAuthTokenTests, self).config_overrides()
- self.config_fixture.config(group='token', provider='fernet')
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
- def test_delete_keystone_tokens_by_consumer_id(self):
- # NOTE(lbragstad): Fernet tokens are never persisted in the backend.
- pass
-
-
-class MaliciousOAuth1Tests(OAuth1Tests):
-
- def test_bad_consumer_secret(self):
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer = {'key': consumer_id, 'secret': uuid.uuid4().hex}
- url, headers = self._create_request_token(consumer, self.project_id)
- self.post(url, headers=headers,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_bad_request_token_key(self):
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer_secret = consumer['secret']
- consumer = {'key': consumer_id, 'secret': consumer_secret}
- url, headers = self._create_request_token(consumer, self.project_id)
- self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- url = self._authorize_request_token(uuid.uuid4().hex)
- body = {'roles': [{'id': self.role_id}]}
- self.put(url, body=body, expected_status=http_client.NOT_FOUND)
-
- def test_bad_consumer_id(self):
- consumer = self._create_single_consumer()
- consumer_id = uuid.uuid4().hex
- consumer_secret = consumer['secret']
- consumer = {'key': consumer_id, 'secret': consumer_secret}
- url, headers = self._create_request_token(consumer, self.project_id)
- self.post(url, headers=headers, expected_status=http_client.NOT_FOUND)
-
- def test_bad_requested_project_id(self):
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer_secret = consumer['secret']
- consumer = {'key': consumer_id, 'secret': consumer_secret}
- project_id = uuid.uuid4().hex
- url, headers = self._create_request_token(consumer, project_id)
- self.post(url, headers=headers, expected_status=http_client.NOT_FOUND)
-
- def test_bad_verifier(self):
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer_secret = consumer['secret']
- consumer = {'key': consumer_id, 'secret': consumer_secret}
-
- url, headers = self._create_request_token(consumer, self.project_id)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- request_key = credentials['oauth_token'][0]
- request_secret = credentials['oauth_token_secret'][0]
- request_token = oauth1.Token(request_key, request_secret)
-
- url = self._authorize_request_token(request_key)
- body = {'roles': [{'id': self.role_id}]}
- resp = self.put(url, body=body, expected_status=http_client.OK)
- verifier = resp.result['token']['oauth_verifier']
- self.assertIsNotNone(verifier)
-
- request_token.set_verifier(uuid.uuid4().hex)
- url, headers = self._create_access_token(consumer, request_token)
- self.post(url, headers=headers,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_bad_authorizing_roles(self):
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer_secret = consumer['secret']
- consumer = {'key': consumer_id, 'secret': consumer_secret}
-
- url, headers = self._create_request_token(consumer, self.project_id)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- request_key = credentials['oauth_token'][0]
-
- self.assignment_api.remove_role_from_user_and_project(
- self.user_id, self.project_id, self.role_id)
- url = self._authorize_request_token(request_key)
- body = {'roles': [{'id': self.role_id}]}
- self.admin_request(path=url, method='PUT',
- body=body, expected_status=http_client.NOT_FOUND)
-
- def test_expired_authorizing_request_token(self):
- self.config_fixture.config(group='oauth1', request_token_duration=-1)
-
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer_secret = consumer['secret']
- self.consumer = {'key': consumer_id, 'secret': consumer_secret}
- self.assertIsNotNone(self.consumer['key'])
-
- url, headers = self._create_request_token(self.consumer,
- self.project_id)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- request_key = credentials['oauth_token'][0]
- request_secret = credentials['oauth_token_secret'][0]
- self.request_token = oauth1.Token(request_key, request_secret)
- self.assertIsNotNone(self.request_token.key)
-
- url = self._authorize_request_token(request_key)
- body = {'roles': [{'id': self.role_id}]}
- self.put(url, body=body, expected_status=http_client.UNAUTHORIZED)
-
- def test_expired_creating_keystone_token(self):
- self.config_fixture.config(group='oauth1', access_token_duration=-1)
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer_secret = consumer['secret']
- self.consumer = {'key': consumer_id, 'secret': consumer_secret}
- self.assertIsNotNone(self.consumer['key'])
-
- url, headers = self._create_request_token(self.consumer,
- self.project_id)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- request_key = credentials['oauth_token'][0]
- request_secret = credentials['oauth_token_secret'][0]
- self.request_token = oauth1.Token(request_key, request_secret)
- self.assertIsNotNone(self.request_token.key)
-
- url = self._authorize_request_token(request_key)
- body = {'roles': [{'id': self.role_id}]}
- resp = self.put(url, body=body, expected_status=http_client.OK)
- self.verifier = resp.result['token']['oauth_verifier']
-
- self.request_token.set_verifier(self.verifier)
- url, headers = self._create_access_token(self.consumer,
- self.request_token)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- access_key = credentials['oauth_token'][0]
- access_secret = credentials['oauth_token_secret'][0]
- self.access_token = oauth1.Token(access_key, access_secret)
- self.assertIsNotNone(self.access_token.key)
-
- url, headers, body = self._get_oauth_token(self.consumer,
- self.access_token)
- self.post(url, headers=headers, body=body,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_missing_oauth_headers(self):
- endpoint = '/OS-OAUTH1/request_token'
- client = oauth1.Client(uuid.uuid4().hex,
- client_secret=uuid.uuid4().hex,
- signature_method=oauth1.SIG_HMAC,
- callback_uri="oob")
- headers = {'requested_project_id': uuid.uuid4().hex}
- _url, headers, _body = client.sign(self.base_url + endpoint,
- http_method='POST',
- headers=headers)
-
- # NOTE(stevemar): To simulate this error, we remove the Authorization
- # header from the post request.
- del headers['Authorization']
- self.post(endpoint, headers=headers,
- expected_status=http_client.INTERNAL_SERVER_ERROR)
-
-
-class OAuthNotificationTests(OAuth1Tests,
- test_notifications.BaseNotificationTest):
-
- def test_create_consumer(self):
- consumer_ref = self._create_single_consumer()
- self._assert_notify_sent(consumer_ref['id'],
- test_notifications.CREATED_OPERATION,
- 'OS-OAUTH1:consumer')
- self._assert_last_audit(consumer_ref['id'],
- test_notifications.CREATED_OPERATION,
- 'OS-OAUTH1:consumer',
- cadftaxonomy.SECURITY_ACCOUNT)
-
- def test_update_consumer(self):
- consumer_ref = self._create_single_consumer()
- update_ref = {'consumer': {'description': uuid.uuid4().hex}}
- self.oauth_api.update_consumer(consumer_ref['id'], update_ref)
- self._assert_notify_sent(consumer_ref['id'],
- test_notifications.UPDATED_OPERATION,
- 'OS-OAUTH1:consumer')
- self._assert_last_audit(consumer_ref['id'],
- test_notifications.UPDATED_OPERATION,
- 'OS-OAUTH1:consumer',
- cadftaxonomy.SECURITY_ACCOUNT)
-
- def test_delete_consumer(self):
- consumer_ref = self._create_single_consumer()
- self.oauth_api.delete_consumer(consumer_ref['id'])
- self._assert_notify_sent(consumer_ref['id'],
- test_notifications.DELETED_OPERATION,
- 'OS-OAUTH1:consumer')
- self._assert_last_audit(consumer_ref['id'],
- test_notifications.DELETED_OPERATION,
- 'OS-OAUTH1:consumer',
- cadftaxonomy.SECURITY_ACCOUNT)
-
- def test_oauth_flow_notifications(self):
- """Test to ensure notifications are sent for oauth tokens
-
- This test is very similar to test_oauth_flow, however
- there are additional checks in this test for ensuring that
- notifications for request token creation, and access token
- creation/deletion are emitted.
- """
- consumer = self._create_single_consumer()
- consumer_id = consumer['id']
- consumer_secret = consumer['secret']
- self.consumer = {'key': consumer_id, 'secret': consumer_secret}
- self.assertIsNotNone(self.consumer['secret'])
-
- url, headers = self._create_request_token(self.consumer,
- self.project_id)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- request_key = credentials['oauth_token'][0]
- request_secret = credentials['oauth_token_secret'][0]
- self.request_token = oauth1.Token(request_key, request_secret)
- self.assertIsNotNone(self.request_token.key)
-
- # Test to ensure the create request token notification is sent
- self._assert_notify_sent(request_key,
- test_notifications.CREATED_OPERATION,
- 'OS-OAUTH1:request_token')
- self._assert_last_audit(request_key,
- test_notifications.CREATED_OPERATION,
- 'OS-OAUTH1:request_token',
- cadftaxonomy.SECURITY_CREDENTIAL)
-
- url = self._authorize_request_token(request_key)
- body = {'roles': [{'id': self.role_id}]}
- resp = self.put(url, body=body, expected_status=http_client.OK)
- self.verifier = resp.result['token']['oauth_verifier']
- self.assertTrue(all(i in core.VERIFIER_CHARS for i in self.verifier))
- self.assertEqual(8, len(self.verifier))
-
- self.request_token.set_verifier(self.verifier)
- url, headers = self._create_access_token(self.consumer,
- self.request_token)
- content = self.post(
- url, headers=headers,
- response_content_type='application/x-www-urlformencoded')
- credentials = urllib.parse.parse_qs(content.result)
- access_key = credentials['oauth_token'][0]
- access_secret = credentials['oauth_token_secret'][0]
- self.access_token = oauth1.Token(access_key, access_secret)
- self.assertIsNotNone(self.access_token.key)
-
- # Test to ensure the create access token notification is sent
- self._assert_notify_sent(access_key,
- test_notifications.CREATED_OPERATION,
- 'OS-OAUTH1:access_token')
- self._assert_last_audit(access_key,
- test_notifications.CREATED_OPERATION,
- 'OS-OAUTH1:access_token',
- cadftaxonomy.SECURITY_CREDENTIAL)
-
- resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
- % {'user': self.user_id,
- 'auth': self.access_token.key})
- self.assertResponseStatus(resp, http_client.NO_CONTENT)
-
- # Test to ensure the delete access token notification is sent
- self._assert_notify_sent(access_key,
- test_notifications.DELETED_OPERATION,
- 'OS-OAUTH1:access_token')
- self._assert_last_audit(access_key,
- test_notifications.DELETED_OPERATION,
- 'OS-OAUTH1:access_token',
- cadftaxonomy.SECURITY_CREDENTIAL)
-
-
-class OAuthCADFNotificationTests(OAuthNotificationTests):
-
- def setUp(self):
- """Repeat the tests for CADF notifications."""
- super(OAuthCADFNotificationTests, self).setUp()
- self.config_fixture.config(notification_format='cadf')
-
-
-class JsonHomeTests(OAuth1Tests, test_v3.JsonHomeTestMixin):
- JSON_HOME_DATA = {
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-OAUTH1/1.0/'
- 'rel/consumers': {
- 'href': '/OS-OAUTH1/consumers',
- },
- }
diff --git a/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py b/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
deleted file mode 100644
index 5fb5387a..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import uuid
-
-from oslo_utils import timeutils
-import six
-from six.moves import http_client
-from testtools import matchers
-
-from keystone.common import utils
-from keystone.models import revoke_model
-from keystone.tests.unit import test_v3
-from keystone.token import provider
-
-
-def _future_time_string():
- expire_delta = datetime.timedelta(seconds=1000)
- future_time = timeutils.utcnow() + expire_delta
- return utils.isotime(future_time)
-
-
-class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
-
- JSON_HOME_DATA = {
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-REVOKE/1.0/'
- 'rel/events': {
- 'href': '/OS-REVOKE/events',
- },
- }
-
- def test_get_empty_list(self):
- resp = self.get('/OS-REVOKE/events')
- self.assertEqual([], resp.json_body['events'])
-
- def _blank_event(self):
- return {}
-
- # The two values will be the same with the exception of
- # 'issued_before' which is set when the event is recorded.
- def assertReportedEventMatchesRecorded(self, event, sample, before_time):
- after_time = timeutils.utcnow()
- event_issued_before = timeutils.normalize_time(
- timeutils.parse_isotime(event['issued_before']))
- self.assertTrue(
- before_time <= event_issued_before,
- 'invalid event issued_before time; %s is not later than %s.' % (
- utils.isotime(event_issued_before, subsecond=True),
- utils.isotime(before_time, subsecond=True)))
- self.assertTrue(
- event_issued_before <= after_time,
- 'invalid event issued_before time; %s is not earlier than %s.' % (
- utils.isotime(event_issued_before, subsecond=True),
- utils.isotime(after_time, subsecond=True)))
- del (event['issued_before'])
- self.assertEqual(sample, event)
-
- def test_revoked_list_self_url(self):
- revoked_list_url = '/OS-REVOKE/events'
- resp = self.get(revoked_list_url)
- links = resp.json_body['links']
- self.assertThat(links['self'], matchers.EndsWith(revoked_list_url))
-
- def test_revoked_token_in_list(self):
- user_id = uuid.uuid4().hex
- expires_at = provider.default_expire_time()
- sample = self._blank_event()
- sample['user_id'] = six.text_type(user_id)
- sample['expires_at'] = six.text_type(utils.isotime(expires_at))
- before_time = timeutils.utcnow()
- self.revoke_api.revoke_by_expiration(user_id, expires_at)
- resp = self.get('/OS-REVOKE/events')
- events = resp.json_body['events']
- self.assertEqual(1, len(events))
- self.assertReportedEventMatchesRecorded(events[0], sample, before_time)
-
- def test_disabled_project_in_list(self):
- project_id = uuid.uuid4().hex
- sample = dict()
- sample['project_id'] = six.text_type(project_id)
- before_time = timeutils.utcnow()
- self.revoke_api.revoke(
- revoke_model.RevokeEvent(project_id=project_id))
-
- resp = self.get('/OS-REVOKE/events')
- events = resp.json_body['events']
- self.assertEqual(1, len(events))
- self.assertReportedEventMatchesRecorded(events[0], sample, before_time)
-
- def test_disabled_domain_in_list(self):
- domain_id = uuid.uuid4().hex
- sample = dict()
- sample['domain_id'] = six.text_type(domain_id)
- before_time = timeutils.utcnow()
- self.revoke_api.revoke(
- revoke_model.RevokeEvent(domain_id=domain_id))
-
- resp = self.get('/OS-REVOKE/events')
- events = resp.json_body['events']
- self.assertEqual(1, len(events))
- self.assertReportedEventMatchesRecorded(events[0], sample, before_time)
-
- def test_list_since_invalid(self):
- self.get('/OS-REVOKE/events?since=blah',
- expected_status=http_client.BAD_REQUEST)
-
- def test_list_since_valid(self):
- resp = self.get('/OS-REVOKE/events?since=2013-02-27T18:30:59.999999Z')
- events = resp.json_body['events']
- self.assertEqual(0, len(events))
-
- def test_since_future_time_no_events(self):
- domain_id = uuid.uuid4().hex
- sample = dict()
- sample['domain_id'] = six.text_type(domain_id)
-
- self.revoke_api.revoke(
- revoke_model.RevokeEvent(domain_id=domain_id))
-
- resp = self.get('/OS-REVOKE/events')
- events = resp.json_body['events']
- self.assertEqual(1, len(events))
-
- resp = self.get('/OS-REVOKE/events?since=%s' % _future_time_string())
- events = resp.json_body['events']
- self.assertEqual([], events)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_policy.py b/keystone-moon/keystone/tests/unit/test_v3_policy.py
deleted file mode 100644
index 76a52088..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_policy.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import json
-import uuid
-
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-
-
-class PolicyTestCase(test_v3.RestfulTestCase):
- """Test policy CRUD."""
-
- def setUp(self):
- super(PolicyTestCase, self).setUp()
- self.policy = unit.new_policy_ref()
- self.policy_id = self.policy['id']
- self.policy_api.create_policy(
- self.policy_id,
- self.policy.copy())
-
- # policy crud tests
-
- def test_create_policy(self):
- """Call ``POST /policies``."""
- ref = unit.new_policy_ref()
- r = self.post('/policies', body={'policy': ref})
- return self.assertValidPolicyResponse(r, ref)
-
- def test_list_policies(self):
- """Call ``GET /policies``."""
- r = self.get('/policies')
- self.assertValidPolicyListResponse(r, ref=self.policy)
-
- def test_get_policy(self):
- """Call ``GET /policies/{policy_id}``."""
- r = self.get(
- '/policies/%(policy_id)s' % {'policy_id': self.policy_id})
- self.assertValidPolicyResponse(r, self.policy)
-
- def test_update_policy(self):
- """Call ``PATCH /policies/{policy_id}``."""
- self.policy['blob'] = json.dumps({'data': uuid.uuid4().hex, })
- r = self.patch(
- '/policies/%(policy_id)s' % {'policy_id': self.policy_id},
- body={'policy': self.policy})
- self.assertValidPolicyResponse(r, self.policy)
-
- def test_delete_policy(self):
- """Call ``DELETE /policies/{policy_id}``."""
- self.delete(
- '/policies/%(policy_id)s' % {'policy_id': self.policy_id})
diff --git a/keystone-moon/keystone/tests/unit/test_v3_protection.py b/keystone-moon/keystone/tests/unit/test_v3_protection.py
deleted file mode 100644
index f77a1528..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_protection.py
+++ /dev/null
@@ -1,1777 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-# Copyright 2013 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from oslo_config import cfg
-from oslo_serialization import jsonutils
-from six.moves import http_client
-
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import ksfixtures
-from keystone.tests.unit.ksfixtures import temporaryfile
-from keystone.tests.unit import test_v3
-from keystone.tests.unit import utils
-
-
-CONF = cfg.CONF
-
-
-class IdentityTestProtectedCase(test_v3.RestfulTestCase):
- """Test policy enforcement on the v3 Identity API."""
-
- def _policy_fixture(self):
- return ksfixtures.Policy(self.tmpfilename, self.config_fixture)
-
- def setUp(self):
- """Setup for Identity Protection Test Cases.
-
- As well as the usual housekeeping, create a set of domains,
- users, roles and projects for the subsequent tests:
-
- - Three domains: A,B & C. C is disabled.
- - DomainA has user1, DomainB has user2 and user3
- - DomainA has group1 and group2, DomainB has group3
- - User1 has two roles on DomainA
- - User2 has one role on DomainA
-
- Remember that there will also be a fourth domain in existence,
- the default domain.
-
- """
- self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
- self.tmpfilename = self.tempfile.file_name
- super(IdentityTestProtectedCase, self).setUp()
-
- # A default auth request we can use - un-scoped user token
- self.auth = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'])
-
- def load_sample_data(self):
- self._populate_default_domain()
- # Start by creating a couple of domains
- self.domainA = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.domainB = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainB['id'], self.domainB)
- self.domainC = unit.new_domain_ref(enabled=False)
- self.resource_api.create_domain(self.domainC['id'], self.domainC)
-
- # Now create some users, one in domainA and two of them in domainB
- self.user1 = unit.create_user(self.identity_api,
- domain_id=self.domainA['id'])
- self.user2 = unit.create_user(self.identity_api,
- domain_id=self.domainB['id'])
- self.user3 = unit.create_user(self.identity_api,
- domain_id=self.domainB['id'])
-
- self.group1 = unit.new_group_ref(domain_id=self.domainA['id'])
- self.group1 = self.identity_api.create_group(self.group1)
-
- self.group2 = unit.new_group_ref(domain_id=self.domainA['id'])
- self.group2 = self.identity_api.create_group(self.group2)
-
- self.group3 = unit.new_group_ref(domain_id=self.domainB['id'])
- self.group3 = self.identity_api.create_group(self.group3)
-
- self.role = unit.new_role_ref()
- self.role_api.create_role(self.role['id'], self.role)
- self.role1 = unit.new_role_ref()
- self.role_api.create_role(self.role1['id'], self.role1)
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user1['id'],
- domain_id=self.domainA['id'])
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user2['id'],
- domain_id=self.domainA['id'])
- self.assignment_api.create_grant(self.role1['id'],
- user_id=self.user1['id'],
- domain_id=self.domainA['id'])
-
- def _get_id_list_from_ref_list(self, ref_list):
- result_list = []
- for x in ref_list:
- result_list.append(x['id'])
- return result_list
-
- def _set_policy(self, new_policy):
- with open(self.tmpfilename, "w") as policyfile:
- policyfile.write(jsonutils.dumps(new_policy))
-
- def test_list_users_unprotected(self):
- """GET /users (unprotected)
-
- Test Plan:
-
- - Update policy so api is unprotected
- - Use an un-scoped token to make sure we can get back all
- the users independent of domain
-
- """
- self._set_policy({"identity:list_users": []})
- r = self.get('/users', auth=self.auth)
- id_list = self._get_id_list_from_ref_list(r.result.get('users'))
- self.assertIn(self.user1['id'], id_list)
- self.assertIn(self.user2['id'], id_list)
- self.assertIn(self.user3['id'], id_list)
-
- def test_list_users_filtered_by_domain(self):
- """GET /users?domain_id=mydomain (filtered)
-
- Test Plan:
-
- - Update policy so api is unprotected
- - Use an un-scoped token to make sure we can filter the
- users by domainB, getting back the 2 users in that domain
-
- """
- self._set_policy({"identity:list_users": []})
- url_by_name = '/users?domain_id=%s' % self.domainB['id']
- r = self.get(url_by_name, auth=self.auth)
- # We should get back two users, those in DomainB
- id_list = self._get_id_list_from_ref_list(r.result.get('users'))
- self.assertIn(self.user2['id'], id_list)
- self.assertIn(self.user3['id'], id_list)
-
- def test_get_user_protected_match_id(self):
- """GET /users/{id} (match payload)
-
- Test Plan:
-
- - Update policy to protect api by user_id
- - List users with user_id of user1 as filter, to check that
- this will correctly match user_id in the flattened
- payload
-
- """
- # TODO(henry-nash, ayoung): It would be good to expand this
- # test for further test flattening, e.g. protect on, say, an
- # attribute of an object being created
- new_policy = {"identity:get_user": [["user_id:%(user_id)s"]]}
- self._set_policy(new_policy)
- url_by_name = '/users/%s' % self.user1['id']
- r = self.get(url_by_name, auth=self.auth)
- self.assertEqual(self.user1['id'], r.result['user']['id'])
-
- def test_get_user_protected_match_target(self):
- """GET /users/{id} (match target)
-
- Test Plan:
-
- - Update policy to protect api by domain_id
- - Try and read a user who is in DomainB with a token scoped
- to Domain A - this should fail
- - Retry this for a user who is in Domain A, which should succeed.
- - Finally, try getting a user that does not exist, which should
- still return UserNotFound
-
- """
- new_policy = {'identity:get_user':
- [["domain_id:%(target.user.domain_id)s"]]}
- self._set_policy(new_policy)
- self.auth = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- domain_id=self.domainA['id'])
- url_by_name = '/users/%s' % self.user2['id']
- r = self.get(url_by_name, auth=self.auth,
- expected_status=exception.ForbiddenAction.code)
-
- url_by_name = '/users/%s' % self.user1['id']
- r = self.get(url_by_name, auth=self.auth)
- self.assertEqual(self.user1['id'], r.result['user']['id'])
-
- url_by_name = '/users/%s' % uuid.uuid4().hex
- r = self.get(url_by_name, auth=self.auth,
- expected_status=exception.UserNotFound.code)
-
- def test_revoke_grant_protected_match_target(self):
- """DELETE /domains/{id}/users/{id}/roles/{id} (match target)
-
- Test Plan:
-
- - Update policy to protect api by domain_id of entities in
- the grant
- - Try and delete the existing grant that has a user who is
- from a different domain - this should fail.
- - Retry this for a user who is in Domain A, which should succeed.
-
- """
- new_policy = {'identity:revoke_grant':
- [["domain_id:%(target.user.domain_id)s"]]}
- self._set_policy(new_policy)
- collection_url = (
- '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': self.domainA['id'],
- 'user_id': self.user2['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role['id']}
-
- self.auth = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- domain_id=self.domainA['id'])
- self.delete(member_url, auth=self.auth,
- expected_status=exception.ForbiddenAction.code)
-
- collection_url = (
- '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': self.domainA['id'],
- 'user_id': self.user1['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role1['id']}
- self.delete(member_url, auth=self.auth)
-
- def test_list_users_protected_by_domain(self):
- """GET /users?domain_id=mydomain (protected)
-
- Test Plan:
-
- - Update policy to protect api by domain_id
- - List groups using a token scoped to domainA with a filter
- specifying domainA - we should only get back the one user
- that is in domainA.
- - Try and read the users from domainB - this should fail since
- we don't have a token scoped for domainB
-
- """
- new_policy = {"identity:list_users": ["domain_id:%(domain_id)s"]}
- self._set_policy(new_policy)
- self.auth = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- domain_id=self.domainA['id'])
- url_by_name = '/users?domain_id=%s' % self.domainA['id']
- r = self.get(url_by_name, auth=self.auth)
- # We should only get back one user, the one in DomainA
- id_list = self._get_id_list_from_ref_list(r.result.get('users'))
- self.assertEqual(1, len(id_list))
- self.assertIn(self.user1['id'], id_list)
-
- # Now try for domainB, which should fail
- url_by_name = '/users?domain_id=%s' % self.domainB['id']
- r = self.get(url_by_name, auth=self.auth,
- expected_status=exception.ForbiddenAction.code)
-
- def test_list_groups_protected_by_domain(self):
- """GET /groups?domain_id=mydomain (protected)
-
- Test Plan:
-
- - Update policy to protect api by domain_id
- - List groups using a token scoped to domainA and make sure
- we only get back the two groups that are in domainA
- - Try and read the groups from domainB - this should fail since
- we don't have a token scoped for domainB
-
- """
- new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
- self._set_policy(new_policy)
- self.auth = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- domain_id=self.domainA['id'])
- url_by_name = '/groups?domain_id=%s' % self.domainA['id']
- r = self.get(url_by_name, auth=self.auth)
- # We should only get back two groups, the ones in DomainA
- id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
- self.assertEqual(2, len(id_list))
- self.assertIn(self.group1['id'], id_list)
- self.assertIn(self.group2['id'], id_list)
-
- # Now try for domainB, which should fail
- url_by_name = '/groups?domain_id=%s' % self.domainB['id']
- r = self.get(url_by_name, auth=self.auth,
- expected_status=exception.ForbiddenAction.code)
-
- def test_list_groups_protected_by_domain_and_filtered(self):
- """GET /groups?domain_id=mydomain&name=myname (protected)
-
- Test Plan:
-
- - Update policy to protect api by domain_id
- - List groups using a token scoped to domainA with a filter
- specifying both domainA and the name of group.
- - We should only get back the group in domainA that matches
- the name
-
- """
- new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]}
- self._set_policy(new_policy)
- self.auth = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'],
- domain_id=self.domainA['id'])
- url_by_name = '/groups?domain_id=%s&name=%s' % (
- self.domainA['id'], self.group2['name'])
- r = self.get(url_by_name, auth=self.auth)
- # We should only get back one user, the one in DomainA that matches
- # the name supplied
- id_list = self._get_id_list_from_ref_list(r.result.get('groups'))
- self.assertEqual(1, len(id_list))
- self.assertIn(self.group2['id'], id_list)
-
-
class IdentityTestPolicySample(test_v3.RestfulTestCase):
    """Test policy enforcement of the policy.json file."""

    def load_sample_data(self):
        """Create three users, each with a role grant on one project."""
        self._populate_default_domain()

        domain_id = CONF.identity.default_domain_id
        self.just_a_user = unit.create_user(self.identity_api,
                                            domain_id=domain_id)
        self.another_user = unit.create_user(self.identity_api,
                                             domain_id=domain_id)
        self.admin_user = unit.create_user(self.identity_api,
                                           domain_id=domain_id)

        # One ordinary role plus the special 'admin' role.
        self.role = unit.new_role_ref()
        self.role_api.create_role(self.role['id'], self.role)
        self.admin_role = unit.new_role_ref(name='admin')
        self.role_api.create_role(self.admin_role['id'], self.admin_role)

        # A common project; admin_user gets 'admin' on it, the others get
        # the plain role.
        self.project = unit.new_project_ref(domain_id=domain_id)
        self.resource_api.create_project(self.project['id'], self.project)
        grants = ((self.role, self.just_a_user),
                  (self.role, self.another_user),
                  (self.admin_role, self.admin_user))
        for role, user in grants:
            self.assignment_api.create_grant(role['id'],
                                             user_id=user['id'],
                                             project_id=self.project['id'])

    def _user_token(self, user, project_id=None):
        """Authenticate as *user* and return a fresh token."""
        kwargs = {'user_id': user['id'], 'password': user['password']}
        if project_id is not None:
            kwargs['project_id'] = project_id
        return self.get_requested_token(
            self.build_authentication_request(**kwargs))

    def test_user_validate_same_token(self):
        # GET /v3/auth/tokens with X-Auth-Token == X-Subject-Token: a
        # non-admin token can be used to validate itself.
        token = self._user_token(self.just_a_user)
        self.get('/auth/tokens', token=token,
                 headers={'X-Subject-Token': token})

    def test_user_validate_user_token(self):
        # GET /v3/auth/tokens: a user can validate their own tokens.
        auth_token = self._user_token(self.just_a_user)
        subject_token = self._user_token(self.just_a_user)
        self.get('/auth/tokens', token=auth_token,
                 headers={'X-Subject-Token': subject_token})

    def test_user_validate_other_user_token_rejected(self):
        # GET /v3/auth/tokens: validating another user's token is forbidden.
        user1_token = self._user_token(self.just_a_user)
        user2_token = self._user_token(self.another_user)
        self.get('/auth/tokens', token=user1_token,
                 headers={'X-Subject-Token': user2_token},
                 expected_status=http_client.FORBIDDEN)

    def test_admin_validate_user_token(self):
        # GET /v3/auth/tokens: an admin can validate a user's token.
        admin_token = self._user_token(self.admin_user,
                                       project_id=self.project['id'])
        user_token = self._user_token(self.just_a_user)
        self.get('/auth/tokens', token=admin_token,
                 headers={'X-Subject-Token': user_token})

    def test_user_check_same_token(self):
        # HEAD /v3/auth/tokens with X-Auth-Token == X-Subject-Token: a
        # non-admin token can be used to check itself.
        token = self._user_token(self.just_a_user)
        self.head('/auth/tokens', token=token,
                  headers={'X-Subject-Token': token},
                  expected_status=http_client.OK)

    def test_user_check_user_token(self):
        # HEAD /v3/auth/tokens: a user can check their own tokens.
        auth_token = self._user_token(self.just_a_user)
        subject_token = self._user_token(self.just_a_user)
        self.head('/auth/tokens', token=auth_token,
                  headers={'X-Subject-Token': subject_token},
                  expected_status=http_client.OK)

    def test_user_check_other_user_token_rejected(self):
        # HEAD /v3/auth/tokens: checking another user's token is forbidden.
        user1_token = self._user_token(self.just_a_user)
        user2_token = self._user_token(self.another_user)
        self.head('/auth/tokens', token=user1_token,
                  headers={'X-Subject-Token': user2_token},
                  expected_status=http_client.FORBIDDEN)

    def test_admin_check_user_token(self):
        # HEAD /v3/auth/tokens: an admin can check a user's token.
        admin_token = self._user_token(self.admin_user,
                                       project_id=self.project['id'])
        user_token = self._user_token(self.just_a_user)
        self.head('/auth/tokens', token=admin_token,
                  headers={'X-Subject-Token': user_token},
                  expected_status=http_client.OK)

    def test_user_revoke_same_token(self):
        # DELETE /v3/auth/tokens with X-Auth-Token == X-Subject-Token: a
        # non-admin token can be used to revoke itself.
        token = self._user_token(self.just_a_user)
        self.delete('/auth/tokens', token=token,
                    headers={'X-Subject-Token': token})

    def test_user_revoke_user_token(self):
        # DELETE /v3/auth/tokens: a user can revoke their own tokens.
        auth_token = self._user_token(self.just_a_user)
        subject_token = self._user_token(self.just_a_user)
        self.delete('/auth/tokens', token=auth_token,
                    headers={'X-Subject-Token': subject_token})

    def test_user_revoke_other_user_token_rejected(self):
        # DELETE /v3/auth/tokens: revoking another user's token is forbidden.
        user1_token = self._user_token(self.just_a_user)
        user2_token = self._user_token(self.another_user)
        self.delete('/auth/tokens', token=user1_token,
                    headers={'X-Subject-Token': user2_token},
                    expected_status=http_client.FORBIDDEN)

    def test_admin_revoke_user_token(self):
        # DELETE /v3/auth/tokens: an admin can revoke a user's token.
        admin_token = self._user_token(self.admin_user,
                                       project_id=self.project['id'])
        user_token = self._user_token(self.just_a_user)
        self.delete('/auth/tokens', token=admin_token,
                    headers={'X-Subject-Token': user_token})
-
-
-class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
- test_v3.AssignmentTestMixin):
- """Test policy enforcement of the sample v3 cloud policy file."""
-
- def _policy_fixture(self):
- return ksfixtures.Policy(unit.dirs.etc('policy.v3cloudsample.json'),
- self.config_fixture)
-
- def setUp(self):
- """Setup for v3 Cloud Policy Sample Test Cases.
-
- The following data is created:
-
- - Three domains: domainA, domainB and admin_domain
- - One project, which name is 'project'
- - domainA has three users: domain_admin_user, project_admin_user and
- just_a_user:
-
- - domain_admin_user has role 'admin' on domainA,
- - project_admin_user has role 'admin' on the project,
- - just_a_user has a non-admin role on both domainA and the project.
- - admin_domain has admin_project, and user cloud_admin_user, with an
- 'admin' role on admin_project.
-
- We test various api protection rules from the cloud sample policy
- file to make sure the sample is valid and that we correctly enforce it.
-
- """
- # Ensure that test_v3.RestfulTestCase doesn't load its own
- # sample data, which would make checking the results of our
- # tests harder
- super(IdentityTestv3CloudPolicySample, self).setUp()
-
- self.config_fixture.config(
- group='resource',
- admin_project_name=self.admin_project['name'])
- self.config_fixture.config(
- group='resource',
- admin_project_domain_name=self.admin_domain['name'])
-
- def load_sample_data(self):
- # Start by creating a couple of domains
- self._populate_default_domain()
- self.domainA = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.domainB = unit.new_domain_ref()
- self.resource_api.create_domain(self.domainB['id'], self.domainB)
- self.admin_domain = unit.new_domain_ref()
- self.resource_api.create_domain(self.admin_domain['id'],
- self.admin_domain)
-
- self.admin_project = unit.new_project_ref(
- domain_id=self.admin_domain['id'])
- self.resource_api.create_project(self.admin_project['id'],
- self.admin_project)
-
- # And our users
- self.cloud_admin_user = unit.create_user(
- self.identity_api,
- domain_id=self.admin_domain['id'])
- self.just_a_user = unit.create_user(
- self.identity_api,
- domain_id=self.domainA['id'])
- self.domain_admin_user = unit.create_user(
- self.identity_api,
- domain_id=self.domainA['id'])
- self.domainB_admin_user = unit.create_user(
- self.identity_api,
- domain_id=self.domainB['id'])
- self.project_admin_user = unit.create_user(
- self.identity_api,
- domain_id=self.domainA['id'])
- self.project_adminB_user = unit.create_user(
- self.identity_api,
- domain_id=self.domainB['id'])
-
- # The admin role, a domain specific role and another plain role
- self.admin_role = unit.new_role_ref(name='admin')
- self.role_api.create_role(self.admin_role['id'], self.admin_role)
- self.roleA = unit.new_role_ref(domain_id=self.domainA['id'])
- self.role_api.create_role(self.roleA['id'], self.roleA)
- self.role = unit.new_role_ref()
- self.role_api.create_role(self.role['id'], self.role)
-
- # The cloud admin just gets the admin role on the special admin project
- self.assignment_api.create_grant(self.admin_role['id'],
- user_id=self.cloud_admin_user['id'],
- project_id=self.admin_project['id'])
-
- # Assign roles to the domain
- self.assignment_api.create_grant(self.admin_role['id'],
- user_id=self.domain_admin_user['id'],
- domain_id=self.domainA['id'])
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.just_a_user['id'],
- domain_id=self.domainA['id'])
- self.assignment_api.create_grant(self.admin_role['id'],
- user_id=self.domainB_admin_user['id'],
- domain_id=self.domainB['id'])
-
- # Create and assign roles to the project
- self.project = unit.new_project_ref(domain_id=self.domainA['id'])
- self.resource_api.create_project(self.project['id'], self.project)
- self.projectB = unit.new_project_ref(domain_id=self.domainB['id'])
- self.resource_api.create_project(self.projectB['id'], self.projectB)
- self.assignment_api.create_grant(self.admin_role['id'],
- user_id=self.project_admin_user['id'],
- project_id=self.project['id'])
- self.assignment_api.create_grant(
- self.admin_role['id'], user_id=self.project_adminB_user['id'],
- project_id=self.projectB['id'])
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.just_a_user['id'],
- project_id=self.project['id'])
-
- def _stati(self, expected_status):
- # Return the expected return codes for APIs with and without data
- # with any specified status overriding the normal values
- if expected_status is None:
- return (http_client.OK, http_client.CREATED,
- http_client.NO_CONTENT)
- else:
- return (expected_status, expected_status, expected_status)
-
- def _test_user_management(self, domain_id, expected=None):
- status_OK, status_created, status_no_data = self._stati(expected)
- entity_url = '/users/%s' % self.just_a_user['id']
- list_url = '/users?domain_id=%s' % domain_id
-
- self.get(entity_url, auth=self.auth,
- expected_status=status_OK)
- self.get(list_url, auth=self.auth,
- expected_status=status_OK)
- user = {'description': 'Updated'}
- self.patch(entity_url, auth=self.auth, body={'user': user},
- expected_status=status_OK)
- self.delete(entity_url, auth=self.auth,
- expected_status=status_no_data)
-
- user_ref = unit.new_user_ref(domain_id=domain_id)
- self.post('/users', auth=self.auth, body={'user': user_ref},
- expected_status=status_created)
-
- def _test_project_management(self, domain_id, expected=None):
- status_OK, status_created, status_no_data = self._stati(expected)
- entity_url = '/projects/%s' % self.project['id']
- list_url = '/projects?domain_id=%s' % domain_id
-
- self.get(entity_url, auth=self.auth,
- expected_status=status_OK)
- self.get(list_url, auth=self.auth,
- expected_status=status_OK)
- project = {'description': 'Updated'}
- self.patch(entity_url, auth=self.auth, body={'project': project},
- expected_status=status_OK)
- self.delete(entity_url, auth=self.auth,
- expected_status=status_no_data)
-
- proj_ref = unit.new_project_ref(domain_id=domain_id)
- self.post('/projects', auth=self.auth, body={'project': proj_ref},
- expected_status=status_created)
-
- def _test_domain_management(self, expected=None):
- status_OK, status_created, status_no_data = self._stati(expected)
- entity_url = '/domains/%s' % self.domainB['id']
- list_url = '/domains'
-
- self.get(entity_url, auth=self.auth,
- expected_status=status_OK)
- self.get(list_url, auth=self.auth,
- expected_status=status_OK)
- domain = {'description': 'Updated', 'enabled': False}
- self.patch(entity_url, auth=self.auth, body={'domain': domain},
- expected_status=status_OK)
- self.delete(entity_url, auth=self.auth,
- expected_status=status_no_data)
-
- domain_ref = unit.new_domain_ref()
- self.post('/domains', auth=self.auth, body={'domain': domain_ref},
- expected_status=status_created)
-
- def _test_grants(self, target, entity_id, role_domain_id=None,
- list_status_OK=False, expected=None):
- status_OK, status_created, status_no_data = self._stati(expected)
- a_role = unit.new_role_ref(domain_id=role_domain_id)
- self.role_api.create_role(a_role['id'], a_role)
-
- collection_url = (
- '/%(target)s/%(target_id)s/users/%(user_id)s/roles' % {
- 'target': target,
- 'target_id': entity_id,
- 'user_id': self.just_a_user['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': a_role['id']}
-
- self.put(member_url, auth=self.auth,
- expected_status=status_no_data)
- self.head(member_url, auth=self.auth,
- expected_status=status_no_data)
- if list_status_OK:
- self.get(collection_url, auth=self.auth)
- else:
- self.get(collection_url, auth=self.auth,
- expected_status=status_OK)
- self.delete(member_url, auth=self.auth,
- expected_status=status_no_data)
-
- def _role_management_cases(self, read_status_OK=False, expected=None):
- # Set the different status values for different types of call depending
- # on whether we expect the calls to fail or not.
- status_OK, status_created, status_no_data = self._stati(expected)
- entity_url = '/roles/%s' % self.role['id']
- list_url = '/roles'
-
- if read_status_OK:
- self.get(entity_url, auth=self.auth)
- self.get(list_url, auth=self.auth)
- else:
- self.get(entity_url, auth=self.auth,
- expected_status=status_OK)
- self.get(list_url, auth=self.auth,
- expected_status=status_OK)
-
- role = {'name': 'Updated'}
- self.patch(entity_url, auth=self.auth, body={'role': role},
- expected_status=status_OK)
- self.delete(entity_url, auth=self.auth,
- expected_status=status_no_data)
-
- role_ref = unit.new_role_ref()
- self.post('/roles', auth=self.auth, body={'role': role_ref},
- expected_status=status_created)
-
- def _domain_role_management_cases(self, domain_id, read_status_OK=False,
- expected=None):
- # Set the different status values for different types of call depending
- # on whether we expect the calls to fail or not.
- status_OK, status_created, status_no_data = self._stati(expected)
- entity_url = '/roles/%s' % self.roleA['id']
- list_url = '/roles?domain_id=%s' % domain_id
-
- if read_status_OK:
- self.get(entity_url, auth=self.auth)
- self.get(list_url, auth=self.auth)
- else:
- self.get(entity_url, auth=self.auth,
- expected_status=status_OK)
- self.get(list_url, auth=self.auth,
- expected_status=status_OK)
-
- role = {'name': 'Updated'}
- self.patch(entity_url, auth=self.auth, body={'role': role},
- expected_status=status_OK)
- self.delete(entity_url, auth=self.auth,
- expected_status=status_no_data)
-
- role_ref = unit.new_role_ref(domain_id=domain_id)
- self.post('/roles', auth=self.auth, body={'role': role_ref},
- expected_status=status_created)
-
- def test_user_management(self):
- # First, authenticate with a user that does not have the domain
- # admin role - shouldn't be able to do much.
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_user_management(
- self.domainA['id'], expected=exception.ForbiddenAction.code)
-
- # Now, authenticate with a user that does have the domain admin role
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_user_management(self.domainA['id'])
-
    def test_user_management_normalized_keys(self):
        """Illustrate the inconsistent handling of hyphens in keys.

        To quote Morgan in bug 1526244:

            the reason this is converted from "domain-id" to "domain_id" is
            because of how we process/normalize data. The way we have to
            handle specific data types for known columns requires avoiding
            "-" in the actual python code since "-" is not valid for
            attributes in python w/o significant use of "getattr" etc.

            In short, historically we handle some things in conversions. The
            use of "extras" has long been a poor design choice that leads to
            odd/strange inconsistent behaviors because of other choices made
            in handling data from within the body. (In many cases we convert
            from "-" to "_" throughout openstack)

        Source: https://bugs.launchpad.net/keystone/+bug/1526244/comments/9

        """
        # Authenticate with a user that has the domain admin role
        self.auth = self.build_authentication_request(
            user_id=self.domain_admin_user['id'],
            password=self.domain_admin_user['password'],
            domain_id=self.domainA['id'])

        # Show that we can read a normal user without any surprises.
        r = self.get(
            '/users/%s' % self.just_a_user['id'],
            auth=self.auth,
            expected_status=http_client.OK)
        self.assertValidUserResponse(r)

        # We don't normalize query string keys, so both of these result in a
        # 403, because we didn't specify a domain_id query string in either
        # case, and we explicitly require one (it doesn't matter what
        # 'domain-id' value you use).
        self.get(
            '/users?domain-id=%s' % self.domainA['id'],
            auth=self.auth,
            expected_status=exception.ForbiddenAction.code)
        self.get(
            '/users?domain-id=%s' % self.domainB['id'],
            auth=self.auth,
            expected_status=exception.ForbiddenAction.code)

        # If we try updating the user's 'domain_id' by specifying a
        # 'domain-id', then it'll be stored into extras rather than
        # normalized, and the user's actual 'domain_id' is not affected.
        r = self.patch(
            '/users/%s' % self.just_a_user['id'],
            auth=self.auth,
            body={'user': {'domain-id': self.domainB['id']}},
            expected_status=http_client.OK)
        # The hyphenated key is echoed back from extras alongside the real,
        # unchanged 'domain_id' attribute.
        self.assertEqual(self.domainB['id'], r.json['user']['domain-id'])
        self.assertEqual(self.domainA['id'], r.json['user']['domain_id'])
        self.assertNotEqual(self.domainB['id'], self.just_a_user['domain_id'])
        self.assertValidUserResponse(r, self.just_a_user)

        # Finally, show that we can create a new user without any surprises.
        # But if we specify a 'domain-id' instead of a 'domain_id', we get a
        # Forbidden response because we fail a policy check before
        # normalization occurs.
        user_ref = unit.new_user_ref(domain_id=self.domainA['id'])
        r = self.post(
            '/users',
            auth=self.auth,
            body={'user': user_ref},
            expected_status=http_client.CREATED)
        self.assertValidUserResponse(r, ref=user_ref)
        user_ref['domain-id'] = user_ref.pop('domain_id')
        self.post(
            '/users',
            auth=self.auth,
            body={'user': user_ref},
            expected_status=exception.ForbiddenAction.code)
-
- def test_user_management_by_cloud_admin(self):
- # Test users management with a cloud admin. This user should
- # be able to manage users in any domain.
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- self._test_user_management(self.domainA['id'])
-
- def test_project_management(self):
- # First, authenticate with a user that does not have the project
- # admin role - shouldn't be able to do much.
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_project_management(
- self.domainA['id'], expected=exception.ForbiddenAction.code)
-
- # ...but should still be able to list projects of which they are
- # a member
- url = '/users/%s/projects' % self.just_a_user['id']
- self.get(url, auth=self.auth)
-
- # Now, authenticate with a user that does have the domain admin role
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_project_management(self.domainA['id'])
-
- def test_project_management_by_cloud_admin(self):
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- # Check whether cloud admin can operate a domain
- # other than its own domain or not
- self._test_project_management(self.domainA['id'])
-
- def test_domain_grants(self):
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_grants('domains', self.domainA['id'],
- expected=exception.ForbiddenAction.code)
-
- # Now, authenticate with a user that does have the domain admin role
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_grants('domains', self.domainA['id'])
-
- # Check that with such a token we cannot modify grants on a
- # different domain
- self._test_grants('domains', self.domainB['id'],
- expected=exception.ForbiddenAction.code)
-
- def test_domain_grants_by_cloud_admin(self):
- # Test domain grants with a cloud admin. This user should be
- # able to manage roles on any domain.
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- self._test_grants('domains', self.domainA['id'])
-
- def test_domain_grants_by_cloud_admin_for_domain_specific_role(self):
- # Test domain grants with a cloud admin. This user should be
- # able to manage domain roles on any domain.
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- self._test_grants('domains', self.domainA['id'],
- role_domain_id=self.domainB['id'])
-
- def test_domain_grants_by_non_admin_for_domain_specific_role(self):
- # A non-admin shouldn't be able to do anything
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_grants('domains', self.domainA['id'],
- role_domain_id=self.domainA['id'],
- expected=exception.ForbiddenAction.code)
- self._test_grants('domains', self.domainA['id'],
- role_domain_id=self.domainB['id'],
- expected=exception.ForbiddenAction.code)
-
- def test_domain_grants_by_domain_admin_for_domain_specific_role(self):
- # Authenticate with a user that does have the domain admin role,
- # should not be able to assign a domain_specific role from another
- # domain
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_grants('domains', self.domainA['id'],
- role_domain_id=self.domainB['id'],
- # List status will always be OK, since we are not
- # granting/checking/deleting assignments
- list_status_OK=True,
- expected=exception.ForbiddenAction.code)
-
- # They should be able to assign a domain specific role from the same
- # domain
- self._test_grants('domains', self.domainA['id'],
- role_domain_id=self.domainA['id'])
-
- def test_project_grants(self):
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- project_id=self.project['id'])
-
- self._test_grants('projects', self.project['id'],
- expected=exception.ForbiddenAction.code)
-
- # Now, authenticate with a user that does have the project
- # admin role
- self.auth = self.build_authentication_request(
- user_id=self.project_admin_user['id'],
- password=self.project_admin_user['password'],
- project_id=self.project['id'])
-
- self._test_grants('projects', self.project['id'])
-
- def test_project_grants_by_domain_admin(self):
- # Test project grants with a domain admin. This user should be
- # able to manage roles on any project in its own domain.
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_grants('projects', self.project['id'])
-
- def test_project_grants_by_non_admin_for_domain_specific_role(self):
- # A non-admin shouldn't be able to do anything
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- project_id=self.project['id'])
-
- self._test_grants('projects', self.project['id'],
- role_domain_id=self.domainA['id'],
- expected=exception.ForbiddenAction.code)
- self._test_grants('projects', self.project['id'],
- role_domain_id=self.domainB['id'],
- expected=exception.ForbiddenAction.code)
-
- def test_project_grants_by_project_admin_for_domain_specific_role(self):
- # Authenticate with a user that does have the project admin role,
- # should not be able to assign a domain_specific role from another
- # domain
- self.auth = self.build_authentication_request(
- user_id=self.project_admin_user['id'],
- password=self.project_admin_user['password'],
- project_id=self.project['id'])
-
- self._test_grants('projects', self.project['id'],
- role_domain_id=self.domainB['id'],
- # List status will always be OK, since we are not
- # granting/checking/deleting assignments
- list_status_OK=True,
- expected=exception.ForbiddenAction.code)
-
- # They should be able to assign a domain specific role from the same
- # domain
- self._test_grants('projects', self.project['id'],
- role_domain_id=self.domainA['id'])
-
- def test_project_grants_by_domain_admin_for_domain_specific_role(self):
- # Authenticate with a user that does have the domain admin role,
- # should not be able to assign a domain_specific role from another
- # domain
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_grants('projects', self.project['id'],
- role_domain_id=self.domainB['id'],
- # List status will always be OK, since we are not
- # granting/checking/deleting assignments
- list_status_OK=True,
- expected=exception.ForbiddenAction.code)
-
- # They should be able to assign a domain specific role from the same
- # domain
- self._test_grants('projects', self.project['id'],
- role_domain_id=self.domainA['id'])
-
- def test_cloud_admin_list_assignments_of_domain(self):
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- collection_url = self.build_role_assignment_query_url(
- domain_id=self.domainA['id'])
- r = self.get(collection_url, auth=self.auth)
- self.assertValidRoleAssignmentListResponse(
- r, expected_length=2, resource_url=collection_url)
-
- domainA_admin_entity = self.build_role_assignment_entity(
- domain_id=self.domainA['id'],
- user_id=self.domain_admin_user['id'],
- role_id=self.admin_role['id'],
- inherited_to_projects=False)
- domainA_user_entity = self.build_role_assignment_entity(
- domain_id=self.domainA['id'],
- user_id=self.just_a_user['id'],
- role_id=self.role['id'],
- inherited_to_projects=False)
-
- self.assertRoleAssignmentInListResponse(r, domainA_admin_entity)
- self.assertRoleAssignmentInListResponse(r, domainA_user_entity)
-
- def test_domain_admin_list_assignments_of_domain(self):
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- collection_url = self.build_role_assignment_query_url(
- domain_id=self.domainA['id'])
- r = self.get(collection_url, auth=self.auth)
- self.assertValidRoleAssignmentListResponse(
- r, expected_length=2, resource_url=collection_url)
-
- domainA_admin_entity = self.build_role_assignment_entity(
- domain_id=self.domainA['id'],
- user_id=self.domain_admin_user['id'],
- role_id=self.admin_role['id'],
- inherited_to_projects=False)
- domainA_user_entity = self.build_role_assignment_entity(
- domain_id=self.domainA['id'],
- user_id=self.just_a_user['id'],
- role_id=self.role['id'],
- inherited_to_projects=False)
-
- self.assertRoleAssignmentInListResponse(r, domainA_admin_entity)
- self.assertRoleAssignmentInListResponse(r, domainA_user_entity)
-
- def test_domain_admin_list_assignments_of_another_domain_failed(self):
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- collection_url = self.build_role_assignment_query_url(
- domain_id=self.domainB['id'])
- self.get(collection_url, auth=self.auth,
- expected_status=http_client.FORBIDDEN)
-
- def test_domain_user_list_assignments_of_domain_failed(self):
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- domain_id=self.domainA['id'])
-
- collection_url = self.build_role_assignment_query_url(
- domain_id=self.domainA['id'])
- self.get(collection_url, auth=self.auth,
- expected_status=http_client.FORBIDDEN)
-
- def test_cloud_admin_list_assignments_of_project(self):
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- collection_url = self.build_role_assignment_query_url(
- project_id=self.project['id'])
- r = self.get(collection_url, auth=self.auth)
- self.assertValidRoleAssignmentListResponse(
- r, expected_length=2, resource_url=collection_url)
-
- project_admin_entity = self.build_role_assignment_entity(
- project_id=self.project['id'],
- user_id=self.project_admin_user['id'],
- role_id=self.admin_role['id'],
- inherited_to_projects=False)
- project_user_entity = self.build_role_assignment_entity(
- project_id=self.project['id'],
- user_id=self.just_a_user['id'],
- role_id=self.role['id'],
- inherited_to_projects=False)
-
- self.assertRoleAssignmentInListResponse(r, project_admin_entity)
- self.assertRoleAssignmentInListResponse(r, project_user_entity)
-
- def test_admin_project_list_assignments_of_project(self):
- self.auth = self.build_authentication_request(
- user_id=self.project_admin_user['id'],
- password=self.project_admin_user['password'],
- project_id=self.project['id'])
-
- collection_url = self.build_role_assignment_query_url(
- project_id=self.project['id'])
- r = self.get(collection_url, auth=self.auth)
- self.assertValidRoleAssignmentListResponse(
- r, expected_length=2, resource_url=collection_url)
-
- project_admin_entity = self.build_role_assignment_entity(
- project_id=self.project['id'],
- user_id=self.project_admin_user['id'],
- role_id=self.admin_role['id'],
- inherited_to_projects=False)
- project_user_entity = self.build_role_assignment_entity(
- project_id=self.project['id'],
- user_id=self.just_a_user['id'],
- role_id=self.role['id'],
- inherited_to_projects=False)
-
- self.assertRoleAssignmentInListResponse(r, project_admin_entity)
- self.assertRoleAssignmentInListResponse(r, project_user_entity)
-
- @utils.wip('waiting on bug #1437407')
- def test_domain_admin_list_assignments_of_project(self):
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- collection_url = self.build_role_assignment_query_url(
- project_id=self.project['id'])
- r = self.get(collection_url, auth=self.auth)
- self.assertValidRoleAssignmentListResponse(
- r, expected_length=2, resource_url=collection_url)
-
- project_admin_entity = self.build_role_assignment_entity(
- project_id=self.project['id'],
- user_id=self.project_admin_user['id'],
- role_id=self.admin_role['id'],
- inherited_to_projects=False)
- project_user_entity = self.build_role_assignment_entity(
- project_id=self.project['id'],
- user_id=self.just_a_user['id'],
- role_id=self.role['id'],
- inherited_to_projects=False)
-
- self.assertRoleAssignmentInListResponse(r, project_admin_entity)
- self.assertRoleAssignmentInListResponse(r, project_user_entity)
-
- def test_domain_admin_list_assignment_tree(self):
- # Add a child project to the standard test data
- sub_project = unit.new_project_ref(domain_id=self.domainA['id'],
- parent_id=self.project['id'])
- self.resource_api.create_project(sub_project['id'], sub_project)
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.just_a_user['id'],
- project_id=sub_project['id'])
-
- collection_url = self.build_role_assignment_query_url(
- project_id=self.project['id'])
- collection_url += '&include_subtree=True'
-
- # The domain admin should be able to list the assignment tree
- auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- r = self.get(collection_url, auth=auth)
- self.assertValidRoleAssignmentListResponse(
- r, expected_length=3, resource_url=collection_url)
-
- # A project admin should not be able to
- auth = self.build_authentication_request(
- user_id=self.project_admin_user['id'],
- password=self.project_admin_user['password'],
- project_id=self.project['id'])
-
- r = self.get(collection_url, auth=auth,
- expected_status=http_client.FORBIDDEN)
-
- # A neither should a domain admin from a different domain
- domainB_admin_user = unit.create_user(
- self.identity_api,
- domain_id=self.domainB['id'])
- self.assignment_api.create_grant(self.admin_role['id'],
- user_id=domainB_admin_user['id'],
- domain_id=self.domainB['id'])
- auth = self.build_authentication_request(
- user_id=domainB_admin_user['id'],
- password=domainB_admin_user['password'],
- domain_id=self.domainB['id'])
-
- r = self.get(collection_url, auth=auth,
- expected_status=http_client.FORBIDDEN)
-
- def test_domain_user_list_assignments_of_project_failed(self):
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- domain_id=self.domainA['id'])
-
- collection_url = self.build_role_assignment_query_url(
- project_id=self.project['id'])
- self.get(collection_url, auth=self.auth,
- expected_status=http_client.FORBIDDEN)
-
- def test_cloud_admin(self):
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- self._test_domain_management(
- expected=exception.ForbiddenAction.code)
-
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- self._test_domain_management()
-
- def test_admin_project(self):
- self.auth = self.build_authentication_request(
- user_id=self.project_admin_user['id'],
- password=self.project_admin_user['password'],
- project_id=self.project['id'])
-
- self._test_domain_management(
- expected=exception.ForbiddenAction.code)
-
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- self._test_domain_management()
-
- def test_domain_admin_get_domain(self):
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
- entity_url = '/domains/%s' % self.domainA['id']
- self.get(entity_url, auth=self.auth)
-
- def test_list_user_credentials(self):
- credential_user = unit.new_credential_ref(self.just_a_user['id'])
- self.credential_api.create_credential(credential_user['id'],
- credential_user)
- credential_admin = unit.new_credential_ref(self.cloud_admin_user['id'])
- self.credential_api.create_credential(credential_admin['id'],
- credential_admin)
-
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- url = '/credentials?user_id=%s' % self.just_a_user['id']
- self.get(url, auth=self.auth)
- url = '/credentials?user_id=%s' % self.cloud_admin_user['id']
- self.get(url, auth=self.auth,
- expected_status=exception.ForbiddenAction.code)
- url = '/credentials'
- self.get(url, auth=self.auth,
- expected_status=exception.ForbiddenAction.code)
-
- def test_get_and_delete_ec2_credentials(self):
- """Tests getting and deleting ec2 credentials through the ec2 API."""
- another_user = unit.create_user(self.identity_api,
- domain_id=self.domainA['id'])
-
- # create a credential for just_a_user
- just_user_auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- project_id=self.project['id'])
- url = '/users/%s/credentials/OS-EC2' % self.just_a_user['id']
- r = self.post(url, body={'tenant_id': self.project['id']},
- auth=just_user_auth)
-
- # another normal user can't get the credential
- another_user_auth = self.build_authentication_request(
- user_id=another_user['id'],
- password=another_user['password'])
- another_user_url = '/users/%s/credentials/OS-EC2/%s' % (
- another_user['id'], r.result['credential']['access'])
- self.get(another_user_url, auth=another_user_auth,
- expected_status=exception.ForbiddenAction.code)
-
- # the owner can get the credential
- just_user_url = '/users/%s/credentials/OS-EC2/%s' % (
- self.just_a_user['id'], r.result['credential']['access'])
- self.get(just_user_url, auth=just_user_auth)
-
- # another normal user can't delete the credential
- self.delete(another_user_url, auth=another_user_auth,
- expected_status=exception.ForbiddenAction.code)
-
- # the owner can get the credential
- self.delete(just_user_url, auth=just_user_auth)
-
- def test_user_validate_same_token(self):
- # Given a non-admin user token, the token can be used to validate
- # itself.
- # This is GET /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
-
- auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- token = self.get_requested_token(auth)
-
- self.get('/auth/tokens', token=token,
- headers={'X-Subject-Token': token})
-
- def test_user_validate_user_token(self):
- # A user can validate one of their own tokens.
- # This is GET /v3/auth/tokens
-
- auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- token1 = self.get_requested_token(auth)
- token2 = self.get_requested_token(auth)
-
- self.get('/auth/tokens', token=token1,
- headers={'X-Subject-Token': token2})
-
- def test_user_validate_other_user_token_rejected(self):
- # A user cannot validate another user's token.
- # This is GET /v3/auth/tokens
-
- user1_auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- user1_token = self.get_requested_token(user1_auth)
-
- user2_auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'])
- user2_token = self.get_requested_token(user2_auth)
-
- self.get('/auth/tokens', token=user1_token,
- headers={'X-Subject-Token': user2_token},
- expected_status=http_client.FORBIDDEN)
-
- def test_admin_validate_user_token(self):
- # An admin can validate a user's token.
- # This is GET /v3/auth/tokens
-
- admin_auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
- admin_token = self.get_requested_token(admin_auth)
-
- user_auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- user_token = self.get_requested_token(user_auth)
-
- self.get('/auth/tokens', token=admin_token,
- headers={'X-Subject-Token': user_token})
-
- def test_admin_project_validate_user_token(self):
- # An admin can validate a user's token.
- # This is GET /v3/auth/tokens
-
- admin_auth = self.build_authentication_request(
- user_id=self.project_admin_user['id'],
- password=self.project_admin_user['password'],
- project_id=self.project['id'])
-
- admin_token = self.get_requested_token(admin_auth)
-
- user_auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- user_token = self.get_requested_token(user_auth)
-
- self.get('/auth/tokens', token=admin_token,
- headers={'X-Subject-Token': user_token})
-
- def test_user_check_same_token(self):
- # Given a non-admin user token, the token can be used to check
- # itself.
- # This is HEAD /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
-
- auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- token = self.get_requested_token(auth)
-
- self.head('/auth/tokens', token=token,
- headers={'X-Subject-Token': token},
- expected_status=http_client.OK)
-
- def test_user_check_user_token(self):
- # A user can check one of their own tokens.
- # This is HEAD /v3/auth/tokens
-
- auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- token1 = self.get_requested_token(auth)
- token2 = self.get_requested_token(auth)
-
- self.head('/auth/tokens', token=token1,
- headers={'X-Subject-Token': token2},
- expected_status=http_client.OK)
-
- def test_user_check_other_user_token_rejected(self):
- # A user cannot check another user's token.
- # This is HEAD /v3/auth/tokens
-
- user1_auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- user1_token = self.get_requested_token(user1_auth)
-
- user2_auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'])
- user2_token = self.get_requested_token(user2_auth)
-
- self.head('/auth/tokens', token=user1_token,
- headers={'X-Subject-Token': user2_token},
- expected_status=http_client.FORBIDDEN)
-
- def test_admin_check_user_token(self):
- # An admin can check a user's token.
- # This is HEAD /v3/auth/tokens
-
- admin_auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
- admin_token = self.get_requested_token(admin_auth)
-
- user_auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- user_token = self.get_requested_token(user_auth)
-
- self.head('/auth/tokens', token=admin_token,
- headers={'X-Subject-Token': user_token},
- expected_status=http_client.OK)
-
- def test_user_revoke_same_token(self):
- # Given a non-admin user token, the token can be used to revoke
- # itself.
- # This is DELETE /v3/auth/tokens, with X-Auth-Token == X-Subject-Token
-
- auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- token = self.get_requested_token(auth)
-
- self.delete('/auth/tokens', token=token,
- headers={'X-Subject-Token': token})
-
- def test_user_revoke_user_token(self):
- # A user can revoke one of their own tokens.
- # This is DELETE /v3/auth/tokens
-
- auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- token1 = self.get_requested_token(auth)
- token2 = self.get_requested_token(auth)
-
- self.delete('/auth/tokens', token=token1,
- headers={'X-Subject-Token': token2})
-
- def test_user_revoke_other_user_token_rejected(self):
- # A user cannot revoke another user's token.
- # This is DELETE /v3/auth/tokens
-
- user1_auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- user1_token = self.get_requested_token(user1_auth)
-
- user2_auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'])
- user2_token = self.get_requested_token(user2_auth)
-
- self.delete('/auth/tokens', token=user1_token,
- headers={'X-Subject-Token': user2_token},
- expected_status=http_client.FORBIDDEN)
-
- def test_admin_revoke_user_token(self):
- # An admin can revoke a user's token.
- # This is DELETE /v3/auth/tokens
-
- admin_auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
- admin_token = self.get_requested_token(admin_auth)
-
- user_auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'])
- user_token = self.get_requested_token(user_auth)
-
- self.delete('/auth/tokens', token=admin_token,
- headers={'X-Subject-Token': user_token})
-
- def test_user_with_a_role_get_project(self):
- user_auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- project_id=self.project['id'])
-
- # Test user can get project for one they have a role in
- self.get('/projects/%s' % self.project['id'], auth=user_auth)
-
- # Test user can not get project for one they don't have a role in,
- # even if they have a role on another project
- project2 = unit.new_project_ref(domain_id=self.domainA['id'])
- self.resource_api.create_project(project2['id'], project2)
- self.get('/projects/%s' % project2['id'], auth=user_auth,
- expected_status=exception.ForbiddenAction.code)
-
- def test_project_admin_get_project(self):
- admin_auth = self.build_authentication_request(
- user_id=self.project_admin_user['id'],
- password=self.project_admin_user['password'],
- project_id=self.project['id'])
-
- resp = self.get('/projects/%s' % self.project['id'], auth=admin_auth)
- self.assertEqual(self.project['id'],
- jsonutils.loads(resp.body)['project']['id'])
-
- def test_role_management_no_admin_no_rights(self):
- # A non-admin domain user shouldn't be able to manipulate roles
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- domain_id=self.domainA['id'])
-
- self._role_management_cases(expected=exception.ForbiddenAction.code)
-
- # ...and nor should non-admin project user
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- project_id=self.project['id'])
-
- self._role_management_cases(expected=exception.ForbiddenAction.code)
-
- def test_role_management_with_project_admin(self):
- # A project admin user should be able to get and list, but not be able
- # to create/update/delete global roles
- self.auth = self.build_authentication_request(
- user_id=self.project_admin_user['id'],
- password=self.project_admin_user['password'],
- project_id=self.project['id'])
-
- self._role_management_cases(read_status_OK=True,
- expected=exception.ForbiddenAction.code)
-
- def test_role_management_with_domain_admin(self):
- # A domain admin user should be able to get and list, but not be able
- # to create/update/delete global roles
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- self._role_management_cases(read_status_OK=True,
- expected=exception.ForbiddenAction.code)
-
- def test_role_management_with_cloud_admin(self):
- # A cloud admin user should have rights to manipulate global roles
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- self._role_management_cases()
-
- def test_domain_role_management_no_admin_no_rights(self):
- # A non-admin domain user shouldn't be able to manipulate domain roles
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- domain_id=self.domainA['id'])
-
- self._domain_role_management_cases(
- self.domainA['id'], expected=exception.ForbiddenAction.code)
-
- # ...and nor should non-admin project user
- self.auth = self.build_authentication_request(
- user_id=self.just_a_user['id'],
- password=self.just_a_user['password'],
- project_id=self.project['id'])
-
- self._domain_role_management_cases(
- self.domainA['id'], expected=exception.ForbiddenAction.code)
-
- def test_domain_role_management_with_cloud_admin(self):
- # A cloud admin user should have rights to manipulate domain roles
- self.auth = self.build_authentication_request(
- user_id=self.cloud_admin_user['id'],
- password=self.cloud_admin_user['password'],
- project_id=self.admin_project['id'])
-
- self._domain_role_management_cases(self.domainA['id'])
-
- def test_domain_role_management_with_domain_admin(self):
- # A domain admin user should only be able to manipulate the domain
- # specific roles in their own domain
- self.auth = self.build_authentication_request(
- user_id=self.domainB_admin_user['id'],
- password=self.domainB_admin_user['password'],
- domain_id=self.domainB['id'])
-
- # Try to access the domain specific roles in another domain
- self._domain_role_management_cases(
- self.domainA['id'], expected=exception.ForbiddenAction.code)
-
- # ...but they should be able to work with those in their own domain
- self.auth = self.build_authentication_request(
- user_id=self.domain_admin_user['id'],
- password=self.domain_admin_user['password'],
- domain_id=self.domainA['id'])
-
- self._domain_role_management_cases(self.domainA['id'])
-
- def test_domain_role_management_with_project_admin(self):
- # A project admin user should have not access to domain specific roles
- # in another domain. They should be able to get and list domain
- # specific roles from their own domain, but not be able to create,
- # update or delete them,
- self.auth = self.build_authentication_request(
- user_id=self.project_adminB_user['id'],
- password=self.project_adminB_user['password'],
- project_id=self.projectB['id'])
-
- # Try access the domain specific roless in another domain
- self._domain_role_management_cases(
- self.domainA['id'], expected=exception.ForbiddenAction.code)
-
- # ...but they should be ablet to work with those in their own domain
- self.auth = self.build_authentication_request(
- user_id=self.project_admin_user['id'],
- password=self.project_admin_user['password'],
- project_id=self.project['id'])
-
- self._domain_role_management_cases(
- self.domainA['id'], read_status_OK=True,
- expected=exception.ForbiddenAction.code)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_resource.py b/keystone-moon/keystone/tests/unit/test_v3_resource.py
deleted file mode 100644
index f54fcb57..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_resource.py
+++ /dev/null
@@ -1,1434 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-from oslo_config import cfg
-from six.moves import http_client
-from six.moves import range
-from testtools import matchers
-
-from keystone.common import controller
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-from keystone.tests.unit import utils as test_utils
-
-
-CONF = cfg.CONF
-
-
-class ResourceTestCase(test_v3.RestfulTestCase,
- test_v3.AssignmentTestMixin):
- """Test domains and projects."""
-
- # Domain CRUD tests
-
- def test_create_domain(self):
- """Call ``POST /domains``."""
- ref = unit.new_domain_ref()
- r = self.post(
- '/domains',
- body={'domain': ref})
- return self.assertValidDomainResponse(r, ref)
-
- def test_create_domain_case_sensitivity(self):
- """Call `POST /domains`` twice with upper() and lower() cased name."""
- ref = unit.new_domain_ref()
-
- # ensure the name is lowercase
- ref['name'] = ref['name'].lower()
- r = self.post(
- '/domains',
- body={'domain': ref})
- self.assertValidDomainResponse(r, ref)
-
- # ensure the name is uppercase
- ref['name'] = ref['name'].upper()
- r = self.post(
- '/domains',
- body={'domain': ref})
- self.assertValidDomainResponse(r, ref)
-
- def test_create_domain_bad_request(self):
- """Call ``POST /domains``."""
- self.post('/domains', body={'domain': {}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_domain_unsafe(self):
- """Call ``POST /domains with unsafe names``."""
- unsafe_name = 'i am not / safe'
-
- self.config_fixture.config(group='resource',
- domain_name_url_safe='off')
- ref = unit.new_domain_ref(name=unsafe_name)
- self.post(
- '/domains',
- body={'domain': ref})
-
- for config_setting in ['new', 'strict']:
- self.config_fixture.config(group='resource',
- domain_name_url_safe=config_setting)
- ref = unit.new_domain_ref(name=unsafe_name)
- self.post(
- '/domains',
- body={'domain': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_domain_unsafe_default(self):
- """Check default for unsafe names for ``POST /domains``."""
- unsafe_name = 'i am not / safe'
-
- # By default, we should be able to create unsafe names
- ref = unit.new_domain_ref(name=unsafe_name)
- self.post(
- '/domains',
- body={'domain': ref})
-
- def test_create_domain_creates_is_domain_project(self):
- """Check a project that acts as a domain is created.
-
- Call ``POST /domains``.
- """
- # Create a new domain
- domain_ref = unit.new_domain_ref()
- r = self.post('/domains', body={'domain': domain_ref})
- self.assertValidDomainResponse(r, domain_ref)
-
- # Retrieve its correspondent project
- r = self.get('/projects/%(project_id)s' % {
- 'project_id': r.result['domain']['id']})
- self.assertValidProjectResponse(r)
-
- # The created project has is_domain flag as True
- self.assertTrue(r.result['project']['is_domain'])
-
- # And its parent_id and domain_id attributes are equal
- self.assertIsNone(r.result['project']['parent_id'])
- self.assertIsNone(r.result['project']['domain_id'])
-
- def test_create_is_domain_project_creates_domain(self):
- """Call ``POST /projects`` is_domain and check a domain is created."""
- # Create a new project that acts as a domain
- project_ref = unit.new_project_ref(domain_id=None, is_domain=True)
- r = self.post('/projects', body={'project': project_ref})
- self.assertValidProjectResponse(r)
-
- # Retrieve its correspondent domain
- r = self.get('/domains/%(domain_id)s' % {
- 'domain_id': r.result['project']['id']})
- self.assertValidDomainResponse(r)
- self.assertIsNotNone(r.result['domain'])
-
- def test_list_domains(self):
- """Call ``GET /domains``."""
- resource_url = '/domains'
- r = self.get(resource_url)
- self.assertValidDomainListResponse(r, ref=self.domain,
- resource_url=resource_url)
-
- def test_get_domain(self):
- """Call ``GET /domains/{domain_id}``."""
- r = self.get('/domains/%(domain_id)s' % {
- 'domain_id': self.domain_id})
- self.assertValidDomainResponse(r, self.domain)
-
- def test_update_domain(self):
- """Call ``PATCH /domains/{domain_id}``."""
- ref = unit.new_domain_ref()
- del ref['id']
- r = self.patch('/domains/%(domain_id)s' % {
- 'domain_id': self.domain_id},
- body={'domain': ref})
- self.assertValidDomainResponse(r, ref)
-
- def test_update_domain_unsafe(self):
- """Call ``POST /domains/{domain_id} with unsafe names``."""
- unsafe_name = 'i am not / safe'
-
- self.config_fixture.config(group='resource',
- domain_name_url_safe='off')
- ref = unit.new_domain_ref(name=unsafe_name)
- del ref['id']
- self.patch('/domains/%(domain_id)s' % {
- 'domain_id': self.domain_id},
- body={'domain': ref})
-
- unsafe_name = 'i am still not / safe'
- for config_setting in ['new', 'strict']:
- self.config_fixture.config(group='resource',
- domain_name_url_safe=config_setting)
- ref = unit.new_domain_ref(name=unsafe_name)
- del ref['id']
- self.patch('/domains/%(domain_id)s' % {
- 'domain_id': self.domain_id},
- body={'domain': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_update_domain_unsafe_default(self):
- """Check default for unsafe names for ``POST /domains``."""
- unsafe_name = 'i am not / safe'
-
- # By default, we should be able to create unsafe names
- ref = unit.new_domain_ref(name=unsafe_name)
- del ref['id']
- self.patch('/domains/%(domain_id)s' % {
- 'domain_id': self.domain_id},
- body={'domain': ref})
-
- def test_update_domain_updates_is_domain_project(self):
- """Check the project that acts as a domain is updated.
-
- Call ``PATCH /domains``.
- """
- # Create a new domain
- domain_ref = unit.new_domain_ref()
- r = self.post('/domains', body={'domain': domain_ref})
- self.assertValidDomainResponse(r, domain_ref)
-
- # Disable it
- self.patch('/domains/%s' % r.result['domain']['id'],
- body={'domain': {'enabled': False}})
-
- # Retrieve its correspondent project
- r = self.get('/projects/%(project_id)s' % {
- 'project_id': r.result['domain']['id']})
- self.assertValidProjectResponse(r)
-
- # The created project is disabled as well
- self.assertFalse(r.result['project']['enabled'])
-
- def test_disable_domain(self):
- """Call ``PATCH /domains/{domain_id}`` (set enabled=False)."""
- # Create a 2nd set of entities in a 2nd domain
- domain2 = unit.new_domain_ref()
- self.resource_api.create_domain(domain2['id'], domain2)
-
- project2 = unit.new_project_ref(domain_id=domain2['id'])
- self.resource_api.create_project(project2['id'], project2)
-
- user2 = unit.create_user(self.identity_api,
- domain_id=domain2['id'],
- project_id=project2['id'])
-
- self.assignment_api.add_user_to_project(project2['id'],
- user2['id'])
-
- # First check a user in that domain can authenticate..
- body = {
- 'auth': {
- 'passwordCredentials': {
- 'userId': user2['id'],
- 'password': user2['password']
- },
- 'tenantId': project2['id']
- }
- }
- self.admin_request(
- path='/v2.0/tokens', method='POST', body=body)
-
- auth_data = self.build_authentication_request(
- user_id=user2['id'],
- password=user2['password'],
- project_id=project2['id'])
- self.v3_create_token(auth_data)
-
- # Now disable the domain
- domain2['enabled'] = False
- r = self.patch('/domains/%(domain_id)s' % {
- 'domain_id': domain2['id']},
- body={'domain': {'enabled': False}})
- self.assertValidDomainResponse(r, domain2)
-
- # Make sure the user can no longer authenticate, via
- # either API
- body = {
- 'auth': {
- 'passwordCredentials': {
- 'userId': user2['id'],
- 'password': user2['password']
- },
- 'tenantId': project2['id']
- }
- }
- self.admin_request(
- path='/v2.0/tokens', method='POST', body=body,
- expected_status=http_client.UNAUTHORIZED)
-
- # Try looking up in v3 by name and id
- auth_data = self.build_authentication_request(
- user_id=user2['id'],
- password=user2['password'],
- project_id=project2['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- auth_data = self.build_authentication_request(
- username=user2['name'],
- user_domain_id=domain2['id'],
- password=user2['password'],
- project_id=project2['id'])
- self.v3_create_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_delete_enabled_domain_fails(self):
- """Call ``DELETE /domains/{domain_id}`` (when domain enabled)."""
- # Try deleting an enabled domain, which should fail
- self.delete('/domains/%(domain_id)s' % {
- 'domain_id': self.domain['id']},
- expected_status=exception.ForbiddenAction.code)
-
    def test_delete_domain(self):
        """Call ``DELETE /domains/{domain_id}``.

        The sample data set up already has a user and project that is part of
        self.domain. Additionally we will create a group and a credential
        within it. Since the user we will authenticate with is in this domain,
        we create another set of entities in a second domain. Deleting this
        second domain should delete all these new entities. In addition,
        all the entities in the regular self.domain should be unaffected
        by the delete.

        Test Plan:

        - Create domain2 and a 2nd set of entities
        - Disable domain2
        - Delete domain2
        - Check entities in domain2 have been deleted
        - Check entities in self.domain are unaffected

        """
        # Create a group and a credential in the main domain
        group = unit.new_group_ref(domain_id=self.domain_id)
        group = self.identity_api.create_group(group)

        credential = unit.new_credential_ref(user_id=self.user['id'],
                                             project_id=self.project_id)
        self.credential_api.create_credential(credential['id'], credential)

        # Create a 2nd set of entities in a 2nd domain
        domain2 = unit.new_domain_ref()
        self.resource_api.create_domain(domain2['id'], domain2)

        project2 = unit.new_project_ref(domain_id=domain2['id'])
        project2 = self.resource_api.create_project(project2['id'], project2)

        user2 = unit.new_user_ref(domain_id=domain2['id'],
                                  project_id=project2['id'])
        user2 = self.identity_api.create_user(user2)

        group2 = unit.new_group_ref(domain_id=domain2['id'])
        group2 = self.identity_api.create_group(group2)

        credential2 = unit.new_credential_ref(user_id=user2['id'],
                                              project_id=project2['id'])
        self.credential_api.create_credential(credential2['id'],
                                              credential2)

        # Now disable the new domain and delete it
        # (a domain must be disabled before it can be deleted)
        domain2['enabled'] = False
        r = self.patch('/domains/%(domain_id)s' % {
            'domain_id': domain2['id']},
            body={'domain': {'enabled': False}})
        self.assertValidDomainResponse(r, domain2)
        self.delete('/domains/%(domain_id)s' % {'domain_id': domain2['id']})

        # Check all the domain2 relevant entities are gone
        self.assertRaises(exception.DomainNotFound,
                          self.resource_api.get_domain,
                          domain2['id'])
        self.assertRaises(exception.ProjectNotFound,
                          self.resource_api.get_project,
                          project2['id'])
        self.assertRaises(exception.GroupNotFound,
                          self.identity_api.get_group,
                          group2['id'])
        self.assertRaises(exception.UserNotFound,
                          self.identity_api.get_user,
                          user2['id'])
        self.assertRaises(exception.CredentialNotFound,
                          self.credential_api.get_credential,
                          credential2['id'])

        # ...and that all self.domain entities are still here
        r = self.resource_api.get_domain(self.domain['id'])
        self.assertDictEqual(self.domain, r)
        r = self.resource_api.get_project(self.project['id'])
        self.assertDictEqual(self.project, r)
        r = self.identity_api.get_group(group['id'])
        self.assertDictEqual(group, r)
        r = self.identity_api.get_user(self.user['id'])
        # get_user does not return the password, so drop it before comparing
        self.user.pop('password')
        self.assertDictEqual(self.user, r)
        r = self.credential_api.get_credential(credential['id'])
        self.assertDictEqual(credential, r)
-
- def test_delete_domain_deletes_is_domain_project(self):
- """Check the project that acts as a domain is deleted.
-
- Call ``DELETE /domains``.
- """
- # Create a new domain
- domain_ref = unit.new_domain_ref()
- r = self.post('/domains', body={'domain': domain_ref})
- self.assertValidDomainResponse(r, domain_ref)
-
- # Retrieve its correspondent project
- self.get('/projects/%(project_id)s' % {
- 'project_id': r.result['domain']['id']})
-
- # Delete the domain
- self.patch('/domains/%s' % r.result['domain']['id'],
- body={'domain': {'enabled': False}})
- self.delete('/domains/%s' % r.result['domain']['id'])
-
- # The created project is deleted as well
- self.get('/projects/%(project_id)s' % {
- 'project_id': r.result['domain']['id']}, expected_status=404)
-
- def test_delete_default_domain(self):
- # Need to disable it first.
- self.patch('/domains/%(domain_id)s' % {
- 'domain_id': CONF.identity.default_domain_id},
- body={'domain': {'enabled': False}})
-
- self.delete(
- '/domains/%(domain_id)s' % {
- 'domain_id': CONF.identity.default_domain_id})
-
- def test_token_revoked_once_domain_disabled(self):
- """Test token from a disabled domain has been invalidated.
-
- Test that a token that was valid for an enabled domain
- becomes invalid once that domain is disabled.
-
- """
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
-
- user2 = unit.create_user(self.identity_api,
- domain_id=domain['id'])
-
- # build a request body
- auth_body = self.build_authentication_request(
- user_id=user2['id'],
- password=user2['password'])
-
- # sends a request for the user's token
- token_resp = self.post('/auth/tokens', body=auth_body)
-
- subject_token = token_resp.headers.get('x-subject-token')
-
- # validates the returned token and it should be valid.
- self.head('/auth/tokens',
- headers={'x-subject-token': subject_token},
- expected_status=http_client.OK)
-
- # now disable the domain
- domain['enabled'] = False
- url = "/domains/%(domain_id)s" % {'domain_id': domain['id']}
- self.patch(url,
- body={'domain': {'enabled': False}})
-
- # validates the same token again and it should be 'not found'
- # as the domain has already been disabled.
- self.head('/auth/tokens',
- headers={'x-subject-token': subject_token},
- expected_status=http_client.NOT_FOUND)
-
- def test_delete_domain_hierarchy(self):
- """Call ``DELETE /domains/{domain_id}``."""
- domain = unit.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
-
- root_project = unit.new_project_ref(domain_id=domain['id'])
- root_project = self.resource_api.create_project(root_project['id'],
- root_project)
-
- leaf_project = unit.new_project_ref(
- domain_id=domain['id'],
- parent_id=root_project['id'])
- self.resource_api.create_project(leaf_project['id'], leaf_project)
-
- # Need to disable it first.
- self.patch('/domains/%(domain_id)s' % {
- 'domain_id': domain['id']},
- body={'domain': {'enabled': False}})
-
- self.delete(
- '/domains/%(domain_id)s' % {
- 'domain_id': domain['id']})
-
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- root_project['id'])
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- leaf_project['id'])
-
    def test_forbid_operations_on_federated_domain(self):
        """Make sure one cannot operate on federated domain.

        This includes operations like create, update, delete
        on domain identified by id and name where different variations of
        the id 'Federated' are used.

        """
        def create_domains():
            # Yield one domain ref per case variation of 'Federated'.
            for variation in ('Federated', 'FEDERATED',
                              'federated', 'fEderated'):
                domain = unit.new_domain_ref()
                domain['id'] = variation
                yield domain

        for domain in create_domains():
            self.assertRaises(
                AssertionError, self.resource_api.create_domain,
                domain['id'], domain)
            self.assertRaises(
                AssertionError, self.resource_api.update_domain,
                domain['id'], domain)
            # delete raises DomainNotFound since the domain never existed
            self.assertRaises(
                exception.DomainNotFound, self.resource_api.delete_domain,
                domain['id'])

            # swap 'name' with 'id' and try again, expecting the request to
            # gracefully fail
            domain['id'], domain['name'] = domain['name'], domain['id']
            self.assertRaises(
                AssertionError, self.resource_api.create_domain,
                domain['id'], domain)
            self.assertRaises(
                AssertionError, self.resource_api.update_domain,
                domain['id'], domain)
            self.assertRaises(
                exception.DomainNotFound, self.resource_api.delete_domain,
                domain['id'])
-
- def test_forbid_operations_on_defined_federated_domain(self):
- """Make sure one cannot operate on a user-defined federated domain.
-
- This includes operations like create, update, delete.
-
- """
- non_default_name = 'beta_federated_domain'
- self.config_fixture.config(group='federation',
- federated_domain_name=non_default_name)
- domain = unit.new_domain_ref(name=non_default_name)
- self.assertRaises(AssertionError,
- self.resource_api.create_domain,
- domain['id'], domain)
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.delete_domain,
- domain['id'])
- self.assertRaises(AssertionError,
- self.resource_api.update_domain,
- domain['id'], domain)
-
- # Project CRUD tests
-
- def test_list_projects(self):
- """Call ``GET /projects``."""
- resource_url = '/projects'
- r = self.get(resource_url)
- self.assertValidProjectListResponse(r, ref=self.project,
- resource_url=resource_url)
-
- def test_create_project(self):
- """Call ``POST /projects``."""
- ref = unit.new_project_ref(domain_id=self.domain_id)
- r = self.post(
- '/projects',
- body={'project': ref})
- self.assertValidProjectResponse(r, ref)
-
- def test_create_project_bad_request(self):
- """Call ``POST /projects``."""
- self.post('/projects', body={'project': {}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_project_invalid_domain_id(self):
- """Call ``POST /projects``."""
- ref = unit.new_project_ref(domain_id=uuid.uuid4().hex)
- self.post('/projects', body={'project': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_project_unsafe(self):
- """Call ``POST /projects with unsafe names``."""
- unsafe_name = 'i am not / safe'
-
- self.config_fixture.config(group='resource',
- project_name_url_safe='off')
- ref = unit.new_project_ref(name=unsafe_name)
- self.post(
- '/projects',
- body={'project': ref})
-
- for config_setting in ['new', 'strict']:
- self.config_fixture.config(group='resource',
- project_name_url_safe=config_setting)
- ref = unit.new_project_ref(name=unsafe_name)
- self.post(
- '/projects',
- body={'project': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_project_unsafe_default(self):
- """Check default for unsafe names for ``POST /projects``."""
- unsafe_name = 'i am not / safe'
-
- # By default, we should be able to create unsafe names
- ref = unit.new_project_ref(name=unsafe_name)
- self.post(
- '/projects',
- body={'project': ref})
-
- def test_create_project_with_parent_id_none_and_domain_id_none(self):
- """Call ``POST /projects``."""
- # Grant a domain role for the user
- collection_url = (
- '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': self.domain_id,
- 'user_id': self.user['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
- self.put(member_url)
-
- # Create an authentication request for a domain scoped token
- auth = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain_id)
-
- # Without parent_id and domain_id passed as None, the domain_id should
- # be normalized to the domain on the token, when using a domain
- # scoped token.
- ref = unit.new_project_ref()
- r = self.post(
- '/projects',
- auth=auth,
- body={'project': ref})
- ref['domain_id'] = self.domain['id']
- self.assertValidProjectResponse(r, ref)
-
- def test_create_project_without_parent_id_and_without_domain_id(self):
- """Call ``POST /projects``."""
- # Grant a domain role for the user
- collection_url = (
- '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': self.domain_id,
- 'user_id': self.user['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
- self.put(member_url)
-
- # Create an authentication request for a domain scoped token
- auth = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain_id)
-
- # Without domain_id and parent_id, the domain_id should be
- # normalized to the domain on the token, when using a domain
- # scoped token.
- ref = unit.new_project_ref()
- r = self.post(
- '/projects',
- auth=auth,
- body={'project': ref})
- ref['domain_id'] = self.domain['id']
- self.assertValidProjectResponse(r, ref)
-
    @test_utils.wip('waiting for support for parent_id to imply domain_id')
    def test_create_project_with_parent_id_and_no_domain_id(self):
        """Call ``POST /projects``."""
        # With only the parent_id, the domain_id should be
        # normalized to the parent's domain_id
        ref_child = unit.new_project_ref(parent_id=self.project['id'])

        r = self.post(
            '/projects',
            body={'project': ref_child})
        self.assertEqual(r.result['project']['domain_id'],
                         self.project['domain_id'])
        # NOTE(review): this assumes self.domain['id'] equals
        # self.project['domain_id'] -- confirm if this wip test is revived.
        ref_child['domain_id'] = self.domain['id']
        self.assertValidProjectResponse(r, ref_child)
-
    def _create_projects_hierarchy(self, hierarchy_size=1):
        """Create a single-branched project hierarchy of the specified size.

        :param hierarchy_size: the desired hierarchy size, default is 1 -
                               a project with one child.

        :returns: a list of the projects (response bodies) in the created
                  hierarchy, ordered from root to leaf.

        """
        new_ref = unit.new_project_ref(domain_id=self.domain_id)
        resp = self.post('/projects', body={'project': new_ref})

        projects = [resp.result]

        # Each iteration chains a new child onto the previously created
        # project.
        for i in range(hierarchy_size):
            new_ref = unit.new_project_ref(
                domain_id=self.domain_id,
                parent_id=projects[i]['project']['id'])
            resp = self.post('/projects',
                             body={'project': new_ref})
            self.assertValidProjectResponse(resp, new_ref)

            projects.append(resp.result)

        return projects
-
    def test_list_projects_filtering_by_parent_id(self):
        """Call ``GET /projects?parent_id={project_id}``.

        The filter must return only the immediate children of the given
        project, not the whole subtree.
        """
        projects = self._create_projects_hierarchy(hierarchy_size=2)

        # Add another child to projects[1] - it will be projects[3]
        new_ref = unit.new_project_ref(
            domain_id=self.domain_id,
            parent_id=projects[1]['project']['id'])
        resp = self.post('/projects',
                         body={'project': new_ref})
        self.assertValidProjectResponse(resp, new_ref)

        projects.append(resp.result)

        # Query for projects[0] immediate children - it will
        # be only projects[1]
        r = self.get(
            '/projects?parent_id=%(project_id)s' % {
                'project_id': projects[0]['project']['id']})
        self.assertValidProjectListResponse(r)

        projects_result = r.result['projects']
        expected_list = [projects[1]['project']]

        # projects[0] has projects[1] as child
        self.assertEqual(expected_list, projects_result)

        # Query for projects[1] immediate children - it will
        # be projects[2] and projects[3]
        r = self.get(
            '/projects?parent_id=%(project_id)s' % {
                'project_id': projects[1]['project']['id']})
        self.assertValidProjectListResponse(r)

        projects_result = r.result['projects']
        expected_list = [projects[2]['project'], projects[3]['project']]

        # projects[1] has projects[2] and projects[3] as children
        self.assertEqual(expected_list, projects_result)

        # Query for projects[2] immediate children - it will be an empty list
        r = self.get(
            '/projects?parent_id=%(project_id)s' % {
                'project_id': projects[2]['project']['id']})
        self.assertValidProjectListResponse(r)

        projects_result = r.result['projects']
        expected_list = []

        # projects[2] has no child, projects_result must be an empty list
        self.assertEqual(expected_list, projects_result)
-
- def test_create_hierarchical_project(self):
- """Call ``POST /projects``."""
- self._create_projects_hierarchy()
-
- def test_get_project(self):
- """Call ``GET /projects/{project_id}``."""
- r = self.get(
- '/projects/%(project_id)s' % {
- 'project_id': self.project_id})
- self.assertValidProjectResponse(r, self.project)
-
- def test_get_project_with_parents_as_list_with_invalid_id(self):
- """Call ``GET /projects/{project_id}?parents_as_list``."""
- self.get('/projects/%(project_id)s?parents_as_list' % {
- 'project_id': None}, expected_status=http_client.NOT_FOUND)
-
- self.get('/projects/%(project_id)s?parents_as_list' % {
- 'project_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_get_project_with_subtree_as_list_with_invalid_id(self):
- """Call ``GET /projects/{project_id}?subtree_as_list``."""
- self.get('/projects/%(project_id)s?subtree_as_list' % {
- 'project_id': None}, expected_status=http_client.NOT_FOUND)
-
- self.get('/projects/%(project_id)s?subtree_as_list' % {
- 'project_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
    def test_get_project_with_parents_as_ids(self):
        """Call ``GET /projects/{project_id}?parents_as_ids``."""
        projects = self._create_projects_hierarchy(hierarchy_size=2)

        # Query for projects[2] parents_as_ids
        r = self.get(
            '/projects/%(project_id)s?parents_as_ids' % {
                'project_id': projects[2]['project']['id']})

        self.assertValidProjectResponse(r, projects[2]['project'])
        parents_as_ids = r.result['project']['parents']

        # Assert parents_as_ids is a structured dictionary correctly
        # representing the hierarchy. The request was made using projects[2]
        # id, hence its parents should be projects[1], projects[0] and the
        # is_domain_project, which is the root of the hierarchy. It should
        # have the following structure:
        # {
        #     projects[1]: {
        #         projects[0]: {
        #             is_domain_project: None
        #         }
        #     }
        # }
        # NOTE: the project acting as a domain shares its id with the
        # domain_id of the projects inside it.
        is_domain_project_id = projects[0]['project']['domain_id']
        expected_dict = {
            projects[1]['project']['id']: {
                projects[0]['project']['id']: {is_domain_project_id: None}
            }
        }
        self.assertDictEqual(expected_dict, parents_as_ids)

        # Query for projects[0] parents_as_ids
        r = self.get(
            '/projects/%(project_id)s?parents_as_ids' % {
                'project_id': projects[0]['project']['id']})

        self.assertValidProjectResponse(r, projects[0]['project'])
        parents_as_ids = r.result['project']['parents']

        # projects[0] has only the project that acts as a domain as parent
        expected_dict = {
            is_domain_project_id: None
        }
        self.assertDictEqual(expected_dict, parents_as_ids)

        # Query for is_domain_project parents_as_ids
        r = self.get(
            '/projects/%(project_id)s?parents_as_ids' % {
                'project_id': is_domain_project_id})

        parents_as_ids = r.result['project']['parents']

        # the project that acts as a domain has no parents, parents_as_ids
        # must be None
        self.assertIsNone(parents_as_ids)
-
- def test_get_project_with_parents_as_list_with_full_access(self):
- """``GET /projects/{project_id}?parents_as_list`` with full access.
-
- Test plan:
-
- - Create 'parent', 'project' and 'subproject' projects;
- - Assign a user a role on each one of those projects;
- - Check that calling parents_as_list on 'subproject' returns both
- 'project' and 'parent'.
-
- """
- # Create the project hierarchy
- parent, project, subproject = self._create_projects_hierarchy(2)
-
- # Assign a role for the user on all the created projects
- for proj in (parent, project, subproject):
- self.put(self.build_role_assignment_link(
- role_id=self.role_id, user_id=self.user_id,
- project_id=proj['project']['id']))
-
- # Make the API call
- r = self.get('/projects/%(project_id)s?parents_as_list' %
- {'project_id': subproject['project']['id']})
- self.assertValidProjectResponse(r, subproject['project'])
-
- # Assert only 'project' and 'parent' are in the parents list
- self.assertIn(project, r.result['project']['parents'])
- self.assertIn(parent, r.result['project']['parents'])
- self.assertEqual(2, len(r.result['project']['parents']))
-
- def test_get_project_with_parents_as_list_with_partial_access(self):
- """``GET /projects/{project_id}?parents_as_list`` with partial access.
-
- Test plan:
-
- - Create 'parent', 'project' and 'subproject' projects;
- - Assign a user a role on 'parent' and 'subproject';
- - Check that calling parents_as_list on 'subproject' only returns
- 'parent'.
-
- """
- # Create the project hierarchy
- parent, project, subproject = self._create_projects_hierarchy(2)
-
- # Assign a role for the user on parent and subproject
- for proj in (parent, subproject):
- self.put(self.build_role_assignment_link(
- role_id=self.role_id, user_id=self.user_id,
- project_id=proj['project']['id']))
-
- # Make the API call
- r = self.get('/projects/%(project_id)s?parents_as_list' %
- {'project_id': subproject['project']['id']})
- self.assertValidProjectResponse(r, subproject['project'])
-
- # Assert only 'parent' is in the parents list
- self.assertIn(parent, r.result['project']['parents'])
- self.assertEqual(1, len(r.result['project']['parents']))
-
- def test_get_project_with_parents_as_list_and_parents_as_ids(self):
- """Attempt to list a project's parents as both a list and as IDs.
-
- This uses ``GET /projects/{project_id}?parents_as_list&parents_as_ids``
- which should fail with a Bad Request due to the conflicting query
- strings.
-
- """
- projects = self._create_projects_hierarchy(hierarchy_size=2)
-
- self.get(
- '/projects/%(project_id)s?parents_as_list&parents_as_ids' % {
- 'project_id': projects[1]['project']['id']},
- expected_status=http_client.BAD_REQUEST)
-
- def test_list_project_is_domain_filter(self):
- """Call ``GET /projects?is_domain=True/False``."""
- # Get the initial number of projects, both acting as a domain as well
- # as regular.
- r = self.get('/projects?is_domain=True', expected_status=200)
- initial_number_is_domain_true = len(r.result['projects'])
- r = self.get('/projects?is_domain=False', expected_status=200)
- initial_number_is_domain_false = len(r.result['projects'])
-
- # Add some more projects acting as domains
- new_is_domain_project = unit.new_project_ref(is_domain=True)
- new_is_domain_project = self.resource_api.create_project(
- new_is_domain_project['id'], new_is_domain_project)
- new_is_domain_project2 = unit.new_project_ref(is_domain=True)
- new_is_domain_project2 = self.resource_api.create_project(
- new_is_domain_project2['id'], new_is_domain_project2)
- number_is_domain_true = initial_number_is_domain_true + 2
-
- r = self.get('/projects?is_domain=True', expected_status=200)
- self.assertThat(r.result['projects'],
- matchers.HasLength(number_is_domain_true))
- self.assertIn(new_is_domain_project['id'],
- [p['id'] for p in r.result['projects']])
- self.assertIn(new_is_domain_project2['id'],
- [p['id'] for p in r.result['projects']])
-
- # Now add a regular project
- new_regular_project = unit.new_project_ref(domain_id=self.domain_id)
- new_regular_project = self.resource_api.create_project(
- new_regular_project['id'], new_regular_project)
- number_is_domain_false = initial_number_is_domain_false + 1
-
- # Check we still have the same number of projects acting as domains
- r = self.get('/projects?is_domain=True', expected_status=200)
- self.assertThat(r.result['projects'],
- matchers.HasLength(number_is_domain_true))
-
- # Check the number of regular projects is correct
- r = self.get('/projects?is_domain=False', expected_status=200)
- self.assertThat(r.result['projects'],
- matchers.HasLength(number_is_domain_false))
- self.assertIn(new_regular_project['id'],
- [p['id'] for p in r.result['projects']])
-
- def test_list_project_is_domain_filter_default(self):
- """Default project list should not see projects acting as domains"""
- # Get the initial count of regular projects
- r = self.get('/projects?is_domain=False', expected_status=200)
- number_is_domain_false = len(r.result['projects'])
-
- # Make sure we have at least one project acting as a domain
- new_is_domain_project = unit.new_project_ref(is_domain=True)
- new_is_domain_project = self.resource_api.create_project(
- new_is_domain_project['id'], new_is_domain_project)
-
- r = self.get('/projects', expected_status=200)
- self.assertThat(r.result['projects'],
- matchers.HasLength(number_is_domain_false))
- self.assertNotIn(new_is_domain_project, r.result['projects'])
-
    def test_get_project_with_subtree_as_ids(self):
        """Call ``GET /projects/{project_id}?subtree_as_ids``.

        This test creates a more complex hierarchy to test if the structured
        dictionary returned by using the ``subtree_as_ids`` query param
        correctly represents the hierarchy.

        The hierarchy contains 5 projects with the following structure::

                                  +--A--+
                                  |     |
                               +--B--+  C
                               |     |
                               D     E


        """
        projects = self._create_projects_hierarchy(hierarchy_size=2)

        # In the diagram above: A=projects[0], B=projects[1],
        # C=projects[3], D=projects[2], E=projects[4].

        # Add another child to projects[0] - it will be projects[3]
        new_ref = unit.new_project_ref(
            domain_id=self.domain_id,
            parent_id=projects[0]['project']['id'])
        resp = self.post('/projects',
                         body={'project': new_ref})
        self.assertValidProjectResponse(resp, new_ref)
        projects.append(resp.result)

        # Add another child to projects[1] - it will be projects[4]
        new_ref = unit.new_project_ref(
            domain_id=self.domain_id,
            parent_id=projects[1]['project']['id'])
        resp = self.post('/projects',
                         body={'project': new_ref})
        self.assertValidProjectResponse(resp, new_ref)
        projects.append(resp.result)

        # Query for projects[0] subtree_as_ids
        r = self.get(
            '/projects/%(project_id)s?subtree_as_ids' % {
                'project_id': projects[0]['project']['id']})
        self.assertValidProjectResponse(r, projects[0]['project'])
        subtree_as_ids = r.result['project']['subtree']

        # The subtree hierarchy from projects[0] should have the following
        # structure:
        # {
        #     projects[1]: {
        #         projects[2]: None,
        #         projects[4]: None
        #     },
        #     projects[3]: None
        # }
        expected_dict = {
            projects[1]['project']['id']: {
                projects[2]['project']['id']: None,
                projects[4]['project']['id']: None
            },
            projects[3]['project']['id']: None
        }
        self.assertDictEqual(expected_dict, subtree_as_ids)

        # Now query for projects[1] subtree_as_ids
        r = self.get(
            '/projects/%(project_id)s?subtree_as_ids' % {
                'project_id': projects[1]['project']['id']})
        self.assertValidProjectResponse(r, projects[1]['project'])
        subtree_as_ids = r.result['project']['subtree']

        # The subtree hierarchy from projects[1] should have the following
        # structure:
        # {
        #     projects[2]: None,
        #     projects[4]: None
        # }
        expected_dict = {
            projects[2]['project']['id']: None,
            projects[4]['project']['id']: None
        }
        self.assertDictEqual(expected_dict, subtree_as_ids)

        # Now query for projects[3] subtree_as_ids
        r = self.get(
            '/projects/%(project_id)s?subtree_as_ids' % {
                'project_id': projects[3]['project']['id']})
        self.assertValidProjectResponse(r, projects[3]['project'])
        subtree_as_ids = r.result['project']['subtree']

        # projects[3] has no subtree, subtree_as_ids must be None
        self.assertIsNone(subtree_as_ids)
-
    def test_get_project_with_subtree_as_list_with_full_access(self):
        """``GET /projects/{project_id}?subtree_as_list`` with full access.

        Test plan:

        - Create 'parent', 'project' and 'subproject' projects;
        - Assign a user a role on each one of those projects;
        - Check that calling subtree_as_list on 'parent' returns both
          'project' and 'subproject'.

        """
        # Create the project hierarchy
        parent, project, subproject = self._create_projects_hierarchy(2)

        # Assign a role for the user on all the created projects
        for proj in (parent, project, subproject):
            self.put(self.build_role_assignment_link(
                role_id=self.role_id, user_id=self.user_id,
                project_id=proj['project']['id']))

        # Make the API call
        r = self.get('/projects/%(project_id)s?subtree_as_list' %
                     {'project_id': parent['project']['id']})
        self.assertValidProjectResponse(r, parent['project'])

        # Assert only 'project' and 'subproject' are in the subtree
        self.assertIn(project, r.result['project']['subtree'])
        self.assertIn(subproject, r.result['project']['subtree'])
        self.assertEqual(2, len(r.result['project']['subtree']))
-
- def test_get_project_with_subtree_as_list_with_partial_access(self):
- """``GET /projects/{project_id}?subtree_as_list`` with partial access.
-
- Test plan:
-
- - Create 'parent', 'project' and 'subproject' projects;
- - Assign a user a role on 'parent' and 'subproject';
- - Check that calling subtree_as_list on 'parent' returns 'subproject'.
-
- """
- # Create the project hierarchy
- parent, project, subproject = self._create_projects_hierarchy(2)
-
- # Assign a role for the user on parent and subproject
- for proj in (parent, subproject):
- self.put(self.build_role_assignment_link(
- role_id=self.role_id, user_id=self.user_id,
- project_id=proj['project']['id']))
-
- # Make the API call
- r = self.get('/projects/%(project_id)s?subtree_as_list' %
- {'project_id': parent['project']['id']})
- self.assertValidProjectResponse(r, parent['project'])
-
- # Assert only 'subproject' is in the subtree
- self.assertIn(subproject, r.result['project']['subtree'])
- self.assertEqual(1, len(r.result['project']['subtree']))
-
- def test_get_project_with_subtree_as_list_and_subtree_as_ids(self):
- """Attempt to get a project subtree as both a list and as IDs.
-
- This uses ``GET /projects/{project_id}?subtree_as_list&subtree_as_ids``
- which should fail with a bad request due to the conflicting query
- strings.
-
- """
- projects = self._create_projects_hierarchy(hierarchy_size=2)
-
- self.get(
- '/projects/%(project_id)s?subtree_as_list&subtree_as_ids' % {
- 'project_id': projects[1]['project']['id']},
- expected_status=http_client.BAD_REQUEST)
-
- def test_update_project(self):
- """Call ``PATCH /projects/{project_id}``."""
- ref = unit.new_project_ref(domain_id=self.domain_id,
- parent_id=self.project['parent_id'])
- del ref['id']
- r = self.patch(
- '/projects/%(project_id)s' % {
- 'project_id': self.project_id},
- body={'project': ref})
- self.assertValidProjectResponse(r, ref)
-
    def test_update_project_unsafe(self):
        """Call ``PATCH /projects/{project_id}`` with unsafe names."""
        unsafe_name = 'i am not / safe'

        self.config_fixture.config(group='resource',
                                   project_name_url_safe='off')
        ref = unit.new_project_ref(name=unsafe_name,
                                   domain_id=self.domain_id,
                                   parent_id=self.project['parent_id'])
        del ref['id']
        self.patch(
            '/projects/%(project_id)s' % {
                'project_id': self.project_id},
            body={'project': ref})

        # NOTE(review): a different unsafe name is used for the enforcing
        # modes below -- presumably so the rejected rename is not a no-op
        # against the name already set above; confirm.
        unsafe_name = 'i am still not / safe'
        for config_setting in ['new', 'strict']:
            self.config_fixture.config(group='resource',
                                       project_name_url_safe=config_setting)
            ref = unit.new_project_ref(name=unsafe_name,
                                       domain_id=self.domain_id,
                                       parent_id=self.project['parent_id'])
            del ref['id']
            self.patch(
                '/projects/%(project_id)s' % {
                    'project_id': self.project_id},
                body={'project': ref},
                expected_status=http_client.BAD_REQUEST)
-
    def test_update_project_unsafe_default(self):
        """Check default for unsafe names for ``PATCH /projects``."""
        unsafe_name = 'i am not / safe'

        # By default, we should be able to update to unsafe names
        ref = unit.new_project_ref(name=unsafe_name,
                                   domain_id=self.domain_id,
                                   parent_id=self.project['parent_id'])
        del ref['id']
        self.patch(
            '/projects/%(project_id)s' % {
                'project_id': self.project_id},
            body={'project': ref})
-
- def test_update_project_domain_id(self):
- """Call ``PATCH /projects/{project_id}`` with domain_id."""
- project = unit.new_project_ref(domain_id=self.domain['id'])
- project = self.resource_api.create_project(project['id'], project)
- project['domain_id'] = CONF.identity.default_domain_id
- r = self.patch('/projects/%(project_id)s' % {
- 'project_id': project['id']},
- body={'project': project},
- expected_status=exception.ValidationError.code)
- self.config_fixture.config(domain_id_immutable=False)
- project['domain_id'] = self.domain['id']
- r = self.patch('/projects/%(project_id)s' % {
- 'project_id': project['id']},
- body={'project': project})
- self.assertValidProjectResponse(r, project)
-
- def test_update_project_parent_id(self):
- """Call ``PATCH /projects/{project_id}``."""
- projects = self._create_projects_hierarchy()
- leaf_project = projects[1]['project']
- leaf_project['parent_id'] = None
- self.patch(
- '/projects/%(project_id)s' % {
- 'project_id': leaf_project['id']},
- body={'project': leaf_project},
- expected_status=http_client.FORBIDDEN)
-
- def test_update_project_is_domain_not_allowed(self):
- """Call ``PATCH /projects/{project_id}`` with is_domain.
-
- The is_domain flag is immutable.
- """
- project = unit.new_project_ref(domain_id=self.domain['id'])
- resp = self.post('/projects',
- body={'project': project})
- self.assertFalse(resp.result['project']['is_domain'])
-
- project['parent_id'] = resp.result['project']['parent_id']
- project['is_domain'] = True
- self.patch('/projects/%(project_id)s' % {
- 'project_id': resp.result['project']['id']},
- body={'project': project},
- expected_status=http_client.BAD_REQUEST)
-
- def test_disable_leaf_project(self):
- """Call ``PATCH /projects/{project_id}``."""
- projects = self._create_projects_hierarchy()
- leaf_project = projects[1]['project']
- leaf_project['enabled'] = False
- r = self.patch(
- '/projects/%(project_id)s' % {
- 'project_id': leaf_project['id']},
- body={'project': leaf_project})
- self.assertEqual(
- leaf_project['enabled'], r.result['project']['enabled'])
-
- def test_disable_not_leaf_project(self):
- """Call ``PATCH /projects/{project_id}``."""
- projects = self._create_projects_hierarchy()
- root_project = projects[0]['project']
- root_project['enabled'] = False
- self.patch(
- '/projects/%(project_id)s' % {
- 'project_id': root_project['id']},
- body={'project': root_project},
- expected_status=http_client.FORBIDDEN)
-
    def test_delete_project(self):
        """Call ``DELETE /projects/{project_id}``.

        As well as making sure the delete succeeds, we ensure
        that any credentials that reference this project are
        also deleted, while other credentials are unaffected.

        """
        credential = unit.new_credential_ref(user_id=self.user['id'],
                                             project_id=self.project_id)
        self.credential_api.create_credential(credential['id'], credential)

        # First check the credential for this project is present
        r = self.credential_api.get_credential(credential['id'])
        self.assertDictEqual(credential, r)
        # Create a second credential with a different project
        project2 = unit.new_project_ref(domain_id=self.domain['id'])
        self.resource_api.create_project(project2['id'], project2)
        credential2 = unit.new_credential_ref(user_id=self.user['id'],
                                              project_id=project2['id'])
        self.credential_api.create_credential(credential2['id'], credential2)

        # Now delete the project
        self.delete(
            '/projects/%(project_id)s' % {
                'project_id': self.project_id})

        # Deleting the project should have deleted any credentials
        # that reference this project
        self.assertRaises(exception.CredentialNotFound,
                          self.credential_api.get_credential,
                          credential_id=credential['id'])
        # But the credential for project2 is unaffected
        r = self.credential_api.get_credential(credential2['id'])
        self.assertDictEqual(credential2, r)
-
- def test_delete_not_leaf_project(self):
- """Call ``DELETE /projects/{project_id}``."""
- projects = self._create_projects_hierarchy()
- self.delete(
- '/projects/%(project_id)s' % {
- 'project_id': projects[0]['project']['id']},
- expected_status=http_client.FORBIDDEN)
-
-
-class ResourceV3toV2MethodsTestCase(unit.TestCase):
- """Test domain V3 to V2 conversion methods."""
-
- def _setup_initial_projects(self):
- self.project_id = uuid.uuid4().hex
- self.domain_id = CONF.identity.default_domain_id
- self.parent_id = uuid.uuid4().hex
- # Project with only domain_id in ref
- self.project1 = unit.new_project_ref(id=self.project_id,
- name=self.project_id,
- domain_id=self.domain_id)
- # Project with both domain_id and parent_id in ref
- self.project2 = unit.new_project_ref(id=self.project_id,
- name=self.project_id,
- domain_id=self.domain_id,
- parent_id=self.parent_id)
- # Project with no domain_id and parent_id in ref
- self.project3 = unit.new_project_ref(id=self.project_id,
- name=self.project_id,
- domain_id=self.domain_id,
- parent_id=self.parent_id)
- # Expected result with no domain_id and parent_id
- self.expected_project = {'id': self.project_id,
- 'name': self.project_id}
-
- def test_v2controller_filter_domain_id(self):
- # V2.0 is not domain aware, ensure domain_id is popped off the ref.
- other_data = uuid.uuid4().hex
- domain_id = CONF.identity.default_domain_id
- ref = {'domain_id': domain_id,
- 'other_data': other_data}
-
- ref_no_domain = {'other_data': other_data}
- expected_ref = ref_no_domain.copy()
-
- updated_ref = controller.V2Controller.filter_domain_id(ref)
- self.assertIs(ref, updated_ref)
- self.assertDictEqual(expected_ref, ref)
- # Make sure we don't error/muck up data if domain_id isn't present
- updated_ref = controller.V2Controller.filter_domain_id(ref_no_domain)
- self.assertIs(ref_no_domain, updated_ref)
- self.assertDictEqual(expected_ref, ref_no_domain)
-
- def test_v3controller_filter_domain_id(self):
- # No data should be filtered out in this case.
- other_data = uuid.uuid4().hex
- domain_id = uuid.uuid4().hex
- ref = {'domain_id': domain_id,
- 'other_data': other_data}
-
- expected_ref = ref.copy()
- updated_ref = controller.V3Controller.filter_domain_id(ref)
- self.assertIs(ref, updated_ref)
- self.assertDictEqual(expected_ref, ref)
-
- def test_v2controller_filter_domain(self):
- other_data = uuid.uuid4().hex
- domain_id = uuid.uuid4().hex
- non_default_domain_ref = {'domain': {'id': domain_id},
- 'other_data': other_data}
- default_domain_ref = {'domain': {'id': 'default'},
- 'other_data': other_data}
- updated_ref = controller.V2Controller.filter_domain(default_domain_ref)
- self.assertNotIn('domain', updated_ref)
- self.assertNotIn(
- 'domain',
- controller.V2Controller.filter_domain(non_default_domain_ref))
-
- def test_v2controller_filter_project_parent_id(self):
- # V2.0 is not project hierarchy aware, ensure parent_id is popped off.
- other_data = uuid.uuid4().hex
- parent_id = uuid.uuid4().hex
- ref = {'parent_id': parent_id,
- 'other_data': other_data}
-
- ref_no_parent = {'other_data': other_data}
- expected_ref = ref_no_parent.copy()
-
- updated_ref = controller.V2Controller.filter_project_parent_id(ref)
- self.assertIs(ref, updated_ref)
- self.assertDictEqual(expected_ref, ref)
- # Make sure we don't error/muck up data if parent_id isn't present
- updated_ref = controller.V2Controller.filter_project_parent_id(
- ref_no_parent)
- self.assertIs(ref_no_parent, updated_ref)
- self.assertDictEqual(expected_ref, ref_no_parent)
-
- def test_v3_to_v2_project_method(self):
- self._setup_initial_projects()
-
- # TODO(shaleh): these optional fields are not handled well by the
- # v3_to_v2 code. Manually remove them for now. Eventually update
- # new_project_ref to not return optional values
- del self.project1['enabled']
- del self.project1['description']
- del self.project2['enabled']
- del self.project2['description']
- del self.project3['enabled']
- del self.project3['description']
-
- updated_project1 = controller.V2Controller.v3_to_v2_project(
- self.project1)
- self.assertIs(self.project1, updated_project1)
- self.assertDictEqual(self.expected_project, self.project1)
- updated_project2 = controller.V2Controller.v3_to_v2_project(
- self.project2)
- self.assertIs(self.project2, updated_project2)
- self.assertDictEqual(self.expected_project, self.project2)
- updated_project3 = controller.V2Controller.v3_to_v2_project(
- self.project3)
- self.assertIs(self.project3, updated_project3)
- self.assertDictEqual(self.expected_project, self.project2)
-
- def test_v3_to_v2_project_method_list(self):
- self._setup_initial_projects()
- project_list = [self.project1, self.project2, self.project3]
-
- # TODO(shaleh): these optional fields are not handled well by the
- # v3_to_v2 code. Manually remove them for now. Eventually update
- # new_project_ref to not return optional values
- for p in project_list:
- del p['enabled']
- del p['description']
- updated_list = controller.V2Controller.v3_to_v2_project(project_list)
-
- self.assertEqual(len(updated_list), len(project_list))
-
- for i, ref in enumerate(updated_list):
- # Order should not change.
- self.assertIs(ref, project_list[i])
-
- self.assertDictEqual(self.expected_project, self.project1)
- self.assertDictEqual(self.expected_project, self.project2)
- self.assertDictEqual(self.expected_project, self.project3)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_trust.py b/keystone-moon/keystone/tests/unit/test_v3_trust.py
deleted file mode 100644
index d3127c89..00000000
--- a/keystone-moon/keystone/tests/unit/test_v3_trust.py
+++ /dev/null
@@ -1,403 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import uuid
-
-from six.moves import http_client
-
-from keystone.tests import unit
-from keystone.tests.unit import test_v3
-
-
-class TestTrustOperations(test_v3.RestfulTestCase):
- """Test module for create, read, update and delete operations on trusts.
-
- This module is specific to tests for trust CRUD operations. All other tests
- related to trusts that are authentication or authorization specific should
- live in in the keystone/tests/unit/test_v3_auth.py module.
-
- """
-
- def setUp(self):
- super(TestTrustOperations, self).setUp()
- # create a trustee to delegate stuff to
- self.trustee_user = unit.create_user(self.identity_api,
- domain_id=self.domain_id)
- self.trustee_user_id = self.trustee_user['id']
-
- def test_create_trust_bad_request(self):
- # The server returns a 403 Forbidden rather than a 400 Bad Request, see
- # bug 1133435
- self.post('/OS-TRUST/trusts', body={'trust': {}},
- expected_status=http_client.FORBIDDEN)
-
- def test_trust_crud(self):
- # create a new trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r, ref)
-
- # get the trust
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
- self.assertValidTrustResponse(r, ref)
-
- # validate roles on the trust
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s/roles' % {
- 'trust_id': trust['id']})
- roles = self.assertValidRoleListResponse(r, self.role)
- self.assertIn(self.role['id'], [x['id'] for x in roles])
- self.head(
- '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
- 'trust_id': trust['id'],
- 'role_id': self.role['id']},
- expected_status=http_client.OK)
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
- 'trust_id': trust['id'],
- 'role_id': self.role['id']})
- self.assertValidRoleResponse(r, self.role)
-
- # list all trusts
- r = self.get('/OS-TRUST/trusts')
- self.assertValidTrustListResponse(r, trust)
-
- # trusts are immutable
- self.patch(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- # delete the trust
- self.delete(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
-
- # ensure the trust is not found
- self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- expected_status=http_client.NOT_FOUND)
-
- def test_list_trusts(self):
- # create three trusts with the same trustor and trustee
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- for i in range(3):
- ref['expires_at'] = datetime.datetime.utcnow().replace(
- year=2032).strftime(unit.TIME_FORMAT)
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- self.assertValidTrustResponse(r, ref)
-
- # list all trusts
- r = self.get('/OS-TRUST/trusts')
- trusts = r.result['trusts']
- self.assertEqual(3, len(trusts))
- self.assertValidTrustListResponse(r)
-
- # list all trusts for the trustor
- r = self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
- self.user_id)
- trusts = r.result['trusts']
- self.assertEqual(3, len(trusts))
- self.assertValidTrustListResponse(r)
-
- # list all trusts as the trustor as the trustee.
- r = self.get('/OS-TRUST/trusts?trustee_user_id=%s' %
- self.user_id)
- trusts = r.result['trusts']
- self.assertEqual(0, len(trusts))
-
- # list all trusts as the trustee is forbidden
- r = self.get('/OS-TRUST/trusts?trustee_user_id=%s' %
- self.trustee_user_id,
- expected_status=http_client.FORBIDDEN)
-
- def test_delete_trust(self):
- # create a trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r, ref)
-
- # delete the trust
- self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': trust['id']})
-
- # ensure the trust isn't found
- self.get('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': trust['id']},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_trust_without_trustee_returns_bad_request(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_ids=[self.role_id])
-
- # trustee_user_id is required to create a trust
- del ref['trustee_user_id']
-
- self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_trust_without_impersonation_returns_bad_request(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_ids=[self.role_id])
-
- # impersonation is required to create a trust
- del ref['impersonation']
-
- self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_trust_with_bad_remaining_uses_returns_bad_request(self):
- # negative numbers, strings, non-integers, and 0 are not value values
- for value in [-1, 0, "a bad value", 7.2]:
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- remaining_uses=value,
- role_ids=[self.role_id])
- self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_trust_with_non_existant_trustee_returns_not_found(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=uuid.uuid4().hex,
- project_id=self.project_id,
- role_ids=[self.role_id])
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_trust_with_trustee_as_trustor_returns_forbidden(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.trustee_user_id,
- trustee_user_id=self.user_id,
- project_id=self.project_id,
- role_ids=[self.role_id])
- # NOTE(lbragstad): This fails because the user making the request isn't
- # the trustor defined in the request.
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.FORBIDDEN)
-
- def test_create_trust_with_non_existant_project_returns_not_found(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=uuid.uuid4().hex,
- role_ids=[self.role_id])
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_trust_with_non_existant_role_id_returns_not_found(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_ids=[uuid.uuid4().hex])
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_trust_with_non_existant_role_name_returns_not_found(self):
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_names=[uuid.uuid4().hex])
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- def test_validate_trust_scoped_token_against_v2_returns_unauthorized(self):
- # create a new trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.default_domain_user_id,
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- # get a v3 trust-scoped token as the trustee
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidProjectScopedTokenResponse(
- r, self.default_domain_user)
- token = r.headers.get('X-Subject-Token')
-
- # now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(
- path=path, token=self.get_admin_token(),
- method='GET', expected_status=http_client.UNAUTHORIZED)
-
- def test_v3_v2_intermix_trustor_not_in_default_domain_failed(self):
- # get a project-scoped token
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project_id)
- token = self.get_requested_token(auth_data)
-
- # create a new trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.default_domain_user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.default_domain_project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
- trust = self.assertValidTrustResponse(r)
-
- # get a trust-scoped token as the trustee
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidProjectScopedTokenResponse(
- r, self.trustee_user)
- token = r.headers.get('X-Subject-Token')
-
- # now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(
- path=path, token=self.get_admin_token(),
- method='GET', expected_status=http_client.UNAUTHORIZED)
-
- def test_v3_v2_intermix_project_not_in_default_domain_failed(self):
- # create a trustee in default domain to delegate stuff to
- trustee_user = unit.create_user(self.identity_api,
- domain_id=test_v3.DEFAULT_DOMAIN_ID)
- trustee_user_id = trustee_user['id']
-
- # create a new trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.default_domain_user_id,
- trustee_user_id=trustee_user_id,
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- # get a project-scoped token as the default_domain_user
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project_id)
- token = self.get_requested_token(auth_data)
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
- trust = self.assertValidTrustResponse(r)
-
- # get a trust-scoped token as the trustee
- auth_data = self.build_authentication_request(
- user_id=trustee_user['id'],
- password=trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_create_token(auth_data)
- self.assertValidProjectScopedTokenResponse(r, trustee_user)
- token = r.headers.get('X-Subject-Token')
-
- # ensure the token is invalid against v2
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(
- path=path, token=self.get_admin_token(),
- method='GET', expected_status=http_client.UNAUTHORIZED)
-
- def test_exercise_trust_scoped_token_without_impersonation(self):
- # create a new trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- resp = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(resp)
-
- # get a trust-scoped token as the trustee
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- resp = self.v3_create_token(auth_data)
- resp_body = resp.json_body['token']
-
- self.assertValidProjectScopedTokenResponse(resp,
- self.trustee_user)
- self.assertEqual(self.trustee_user['id'], resp_body['user']['id'])
- self.assertEqual(self.trustee_user['name'], resp_body['user']['name'])
- self.assertEqual(self.domain['id'], resp_body['user']['domain']['id'])
- self.assertEqual(self.domain['name'],
- resp_body['user']['domain']['name'])
- self.assertEqual(self.project['id'], resp_body['project']['id'])
- self.assertEqual(self.project['name'], resp_body['project']['name'])
-
- def test_exercise_trust_scoped_token_with_impersonation(self):
- # create a new trust
- ref = unit.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- resp = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(resp)
-
- # get a trust-scoped token as the trustee
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- resp = self.v3_create_token(auth_data)
- resp_body = resp.json_body['token']
-
- self.assertValidProjectScopedTokenResponse(resp, self.user)
- self.assertEqual(self.user['id'], resp_body['user']['id'])
- self.assertEqual(self.user['name'], resp_body['user']['name'])
- self.assertEqual(self.domain['id'], resp_body['user']['domain']['id'])
- self.assertEqual(self.domain['name'],
- resp_body['user']['domain']['name'])
- self.assertEqual(self.project['id'], resp_body['project']['id'])
- self.assertEqual(self.project['name'], resp_body['project']['name'])
diff --git a/keystone-moon/keystone/tests/unit/test_validation.py b/keystone-moon/keystone/tests/unit/test_validation.py
deleted file mode 100644
index 73cb6ef6..00000000
--- a/keystone-moon/keystone/tests/unit/test_validation.py
+++ /dev/null
@@ -1,2115 +0,0 @@
-# -*- coding: utf-8 -*-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import uuid
-
-import six
-import testtools
-
-from keystone.assignment import schema as assignment_schema
-from keystone.catalog import schema as catalog_schema
-from keystone.common import validation
-from keystone.common.validation import parameter_types
-from keystone.common.validation import validators
-from keystone.credential import schema as credential_schema
-from keystone import exception
-from keystone.federation import schema as federation_schema
-from keystone.identity import schema as identity_schema
-from keystone.oauth1 import schema as oauth1_schema
-from keystone.policy import schema as policy_schema
-from keystone.resource import schema as resource_schema
-from keystone.tests import unit
-from keystone.trust import schema as trust_schema
-
-"""Example model to validate create requests against. Assume that this is
-the only backend for the create and validate schemas. This is just an
-example to show how a backend can be used to construct a schema. In
-Keystone, schemas are built according to the Identity API and the backends
-available in Keystone. This example does not mean that all schema in
-Keystone were strictly based on the SQL backends.
-
-class Entity(sql.ModelBase):
- __tablename__ = 'entity'
- attributes = ['id', 'name', 'domain_id', 'description']
- id = sql.Column(sql.String(64), primary_key=True)
- name = sql.Column(sql.String(255), nullable=False)
- description = sql.Column(sql.Text(), nullable=True)
- enabled = sql.Column(sql.Boolean, default=True, nullable=False)
- url = sql.Column(sql.String(225), nullable=True)
- email = sql.Column(sql.String(64), nullable=True)
-"""
-
-# Test schema to validate create requests against
-
-_entity_properties = {
- 'name': parameter_types.name,
- 'description': validation.nullable(parameter_types.description),
- 'enabled': parameter_types.boolean,
- 'url': validation.nullable(parameter_types.url),
- 'email': validation.nullable(parameter_types.email),
- 'id_string': validation.nullable(parameter_types.id_string)
-}
-
-entity_create = {
- 'type': 'object',
- 'properties': _entity_properties,
- 'required': ['name'],
- 'additionalProperties': True,
-}
-
-entity_create_optional_body = {
- 'type': 'object',
- 'properties': _entity_properties,
- 'additionalProperties': True,
-}
-
-entity_update = {
- 'type': 'object',
- 'properties': _entity_properties,
- 'minProperties': 1,
- 'additionalProperties': True,
-}
-
-_VALID_ENABLED_FORMATS = [True, False]
-
-_INVALID_ENABLED_FORMATS = ['some string', 1, 0, 'True', 'False']
-
-_INVALID_DESC_FORMATS = [False, 1, 2.0]
-
-_VALID_URLS = ['https://example.com', 'http://EXAMPLE.com/v3',
- 'http://localhost', 'http://127.0.0.1:5000',
- 'http://1.1.1.1', 'http://255.255.255.255',
- 'http://[::1]', 'http://[::1]:35357',
- 'http://[1::8]', 'http://[fe80::8%25eth0]',
- 'http://[::1.2.3.4]', 'http://[2001:DB8::1.2.3.4]',
- 'http://[::a:1.2.3.4]', 'http://[a::b:1.2.3.4]',
- 'http://[1:2:3:4:5:6:7:8]', 'http://[1:2:3:4:5:6:1.2.3.4]',
- 'http://[abcd:efAB:CDEF:1111:9999::]']
-
-_INVALID_URLS = [False, 'this is not a URL', 1234, 'www.example.com',
- 'localhost', 'http//something.com',
- 'https//something.com', ' http://example.com']
-
-_VALID_FILTERS = [{'interface': 'admin'},
- {'region': 'US-WEST',
- 'interface': 'internal'}]
-
-_INVALID_FILTERS = ['some string', 1, 0, True, False]
-
-
-def expected_validation_failure(msg):
- def wrapper(f):
- def wrapped(self, *args, **kwargs):
- args = (self,) + args
- e = self.assertRaises(exception.ValidationError, f,
- *args, **kwargs)
- self.assertIn(msg, six.text_type(e))
- return wrapped
- return wrapper
-
-
-class ValidatedDecoratorTests(unit.BaseTestCase):
-
- entity_schema = {
- 'type': 'object',
- 'properties': {
- 'name': parameter_types.name,
- },
- 'required': ['name'],
- }
-
- valid_entity = {
- 'name': uuid.uuid4().hex,
- }
-
- invalid_entity = {
- 'name': 1.0, # NOTE(dstanek): this is the incorrect type for name
- }
-
- @validation.validated(entity_create, 'entity')
- def create_entity(self, entity):
- """Used to test cases where validated param is the only param."""
-
- @validation.validated(entity_create_optional_body, 'entity')
- def create_entity_optional_body(self, entity):
- """Used to test cases where there is an optional body."""
-
- @validation.validated(entity_update, 'entity')
- def update_entity(self, entity_id, entity):
- """Used to test cases where validated param is not the only param."""
-
- def test_calling_create_with_valid_entity_kwarg_succeeds(self):
- self.create_entity(entity=self.valid_entity)
-
- def test_calling_create_with_empty_entity_kwarg_succeeds(self):
- """Test the case when client passing in an empty kwarg reference."""
- self.create_entity_optional_body(entity={})
-
- @expected_validation_failure('Expecting to find entity in request body')
- def test_calling_create_with_kwarg_as_None_fails(self):
- self.create_entity(entity=None)
-
- def test_calling_create_with_valid_entity_arg_succeeds(self):
- self.create_entity(self.valid_entity)
-
- def test_calling_create_with_empty_entity_arg_succeeds(self):
- """Test the case when client passing in an empty entity reference."""
- self.create_entity_optional_body({})
-
- @expected_validation_failure("Invalid input for field 'name'")
- def test_calling_create_with_invalid_entity_fails(self):
- self.create_entity(self.invalid_entity)
-
- @expected_validation_failure('Expecting to find entity in request body')
- def test_calling_create_with_entity_arg_as_None_fails(self):
- self.create_entity(None)
-
- @expected_validation_failure('Expecting to find entity in request body')
- def test_calling_create_without_an_entity_fails(self):
- self.create_entity()
-
- def test_using_the_wrong_name_with_the_decorator_fails(self):
- with testtools.ExpectedException(TypeError):
- @validation.validated(self.entity_schema, 'entity_')
- def function(entity):
- pass
-
- # NOTE(dstanek): below are the test cases for making sure the validation
- # works when the validated param is not the only param. Since all of the
- # actual validation cases are tested above these test are for a sanity
- # check.
-
- def test_calling_update_with_valid_entity_succeeds(self):
- self.update_entity(uuid.uuid4().hex, self.valid_entity)
-
- @expected_validation_failure("Invalid input for field 'name'")
- def test_calling_update_with_invalid_entity_fails(self):
- self.update_entity(uuid.uuid4().hex, self.invalid_entity)
-
- def test_calling_update_with_empty_entity_kwarg_succeeds(self):
- """Test the case when client passing in an empty entity reference."""
- global entity_update
- original_entity_update = entity_update.copy()
- # pop 'minProperties' from schema so that empty body is allowed.
- entity_update.pop('minProperties')
- self.update_entity(uuid.uuid4().hex, entity={})
- entity_update = original_entity_update
-
-
-class EntityValidationTestCase(unit.BaseTestCase):
-
- def setUp(self):
- super(EntityValidationTestCase, self).setUp()
- self.resource_name = 'some resource name'
- self.description = 'Some valid description'
- self.valid_enabled = True
- self.valid_url = 'http://example.com'
- self.valid_email = 'joe@example.com'
- self.create_schema_validator = validators.SchemaValidator(
- entity_create)
- self.update_schema_validator = validators.SchemaValidator(
- entity_update)
-
- def test_create_entity_with_all_valid_parameters_validates(self):
- """Validate all parameter values against test schema."""
- request_to_validate = {'name': self.resource_name,
- 'description': self.description,
- 'enabled': self.valid_enabled,
- 'url': self.valid_url,
- 'email': self.valid_email}
- self.create_schema_validator.validate(request_to_validate)
-
- def test_create_entity_with_only_required_valid_parameters_validates(self):
- """Validate correct for only parameters values against test schema."""
- request_to_validate = {'name': self.resource_name}
- self.create_schema_validator.validate(request_to_validate)
-
- def test_create_entity_with_name_too_long_raises_exception(self):
- """Validate long names.
-
- Validate that an exception is raised when validating a string of 255+
- characters passed in as a name.
- """
- invalid_name = 'a' * 256
- request_to_validate = {'name': invalid_name}
- self.assertRaises(exception.SchemaValidationError,
- self.create_schema_validator.validate,
- request_to_validate)
-
- def test_create_entity_with_name_too_short_raises_exception(self):
- """Validate short names.
-
- Test that an exception is raised when passing a string of length
- zero as a name parameter.
- """
- request_to_validate = {'name': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.create_schema_validator.validate,
- request_to_validate)
-
- def test_create_entity_with_unicode_name_validates(self):
- """Test that we successfully validate a unicode string."""
- request_to_validate = {'name': u'αβγδ'}
- self.create_schema_validator.validate(request_to_validate)
-
- def test_create_entity_with_invalid_enabled_format_raises_exception(self):
- """Validate invalid enabled formats.
-
- Test that an exception is raised when passing invalid boolean-like
- values as `enabled`.
- """
- for format in _INVALID_ENABLED_FORMATS:
- request_to_validate = {'name': self.resource_name,
- 'enabled': format}
- self.assertRaises(exception.SchemaValidationError,
- self.create_schema_validator.validate,
- request_to_validate)
-
- def test_create_entity_with_valid_enabled_formats_validates(self):
- """Validate valid enabled formats.
-
- Test that we have successful validation on boolean values for
- `enabled`.
- """
- for valid_enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {'name': self.resource_name,
- 'enabled': valid_enabled}
- # Make sure validation doesn't raise a validation exception
- self.create_schema_validator.validate(request_to_validate)
-
- def test_create_entity_with_valid_urls_validates(self):
- """Test that proper urls are successfully validated."""
- for valid_url in _VALID_URLS:
- request_to_validate = {'name': self.resource_name,
- 'url': valid_url}
- self.create_schema_validator.validate(request_to_validate)
-
- def test_create_entity_with_invalid_urls_fails(self):
- """Test that an exception is raised when validating improper urls."""
- for invalid_url in _INVALID_URLS:
- request_to_validate = {'name': self.resource_name,
- 'url': invalid_url}
- self.assertRaises(exception.SchemaValidationError,
- self.create_schema_validator.validate,
- request_to_validate)
-
- def test_create_entity_with_valid_email_validates(self):
- """Validate email address
-
- Test that we successfully validate properly formatted email
- addresses.
- """
- request_to_validate = {'name': self.resource_name,
- 'email': self.valid_email}
- self.create_schema_validator.validate(request_to_validate)
-
- def test_create_entity_with_invalid_email_fails(self):
- """Validate invalid email address.
-
- Test that an exception is raised when validating improperly
- formatted email addresses.
- """
- request_to_validate = {'name': self.resource_name,
- 'email': 'some invalid email value'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_schema_validator.validate,
- request_to_validate)
-
- def test_create_entity_with_valid_id_strings(self):
- """Validate acceptable id strings."""
- valid_id_strings = [str(uuid.uuid4()), uuid.uuid4().hex, 'default']
- for valid_id in valid_id_strings:
- request_to_validate = {'name': self.resource_name,
- 'id_string': valid_id}
- self.create_schema_validator.validate(request_to_validate)
-
- def test_create_entity_with_invalid_id_strings(self):
- """Exception raised when using invalid id strings."""
- long_string = 'A' * 65
- invalid_id_strings = ['', long_string]
- for invalid_id in invalid_id_strings:
- request_to_validate = {'name': self.resource_name,
- 'id_string': invalid_id}
- self.assertRaises(exception.SchemaValidationError,
- self.create_schema_validator.validate,
- request_to_validate)
-
- def test_create_entity_with_null_id_string(self):
- """Validate that None is an acceptable optional string type."""
- request_to_validate = {'name': self.resource_name,
- 'id_string': None}
- self.create_schema_validator.validate(request_to_validate)
-
- def test_create_entity_with_null_string_succeeds(self):
- """Exception raised when passing None on required id strings."""
- request_to_validate = {'name': self.resource_name,
- 'id_string': None}
- self.create_schema_validator.validate(request_to_validate)
-
- def test_update_entity_with_no_parameters_fails(self):
- """At least one parameter needs to be present for an update."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_schema_validator.validate,
- request_to_validate)
-
- def test_update_entity_with_all_parameters_valid_validates(self):
- """Simulate updating an entity by ID."""
- request_to_validate = {'name': self.resource_name,
- 'description': self.description,
- 'enabled': self.valid_enabled,
- 'url': self.valid_url,
- 'email': self.valid_email}
- self.update_schema_validator.validate(request_to_validate)
-
- def test_update_entity_with_a_valid_required_parameter_validates(self):
- """Succeed if a valid required parameter is provided."""
- request_to_validate = {'name': self.resource_name}
- self.update_schema_validator.validate(request_to_validate)
-
- def test_update_entity_with_invalid_required_parameter_fails(self):
- """Fail if a provided required parameter is invalid."""
- request_to_validate = {'name': 'a' * 256}
- self.assertRaises(exception.SchemaValidationError,
- self.update_schema_validator.validate,
- request_to_validate)
-
- def test_update_entity_with_a_null_optional_parameter_validates(self):
- """Optional parameters can be null to removed the value."""
- request_to_validate = {'email': None}
- self.update_schema_validator.validate(request_to_validate)
-
- def test_update_entity_with_a_required_null_parameter_fails(self):
- """The `name` parameter can't be null."""
- request_to_validate = {'name': None}
- self.assertRaises(exception.SchemaValidationError,
- self.update_schema_validator.validate,
- request_to_validate)
-
- def test_update_entity_with_a_valid_optional_parameter_validates(self):
- """Succeeds with only a single valid optional parameter."""
- request_to_validate = {'email': self.valid_email}
- self.update_schema_validator.validate(request_to_validate)
-
- def test_update_entity_with_invalid_optional_parameter_fails(self):
- """Fails when an optional parameter is invalid."""
- request_to_validate = {'email': 0}
- self.assertRaises(exception.SchemaValidationError,
- self.update_schema_validator.validate,
- request_to_validate)
-
-
-class ProjectValidationTestCase(unit.BaseTestCase):
- """Test for V3 Project API validation."""
-
- def setUp(self):
- super(ProjectValidationTestCase, self).setUp()
-
- self.project_name = 'My Project'
-
- create = resource_schema.project_create
- update = resource_schema.project_update
- self.create_project_validator = validators.SchemaValidator(create)
- self.update_project_validator = validators.SchemaValidator(update)
-
- def test_validate_project_request(self):
- """Test that we validate a project with `name` in request."""
- request_to_validate = {'name': self.project_name}
- self.create_project_validator.validate(request_to_validate)
-
- def test_validate_project_request_without_name_fails(self):
- """Validate project request fails without name."""
- request_to_validate = {'enabled': True}
- self.assertRaises(exception.SchemaValidationError,
- self.create_project_validator.validate,
- request_to_validate)
-
- def test_validate_project_request_with_enabled(self):
- """Validate `enabled` as boolean-like values for projects."""
- for valid_enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {'name': self.project_name,
- 'enabled': valid_enabled}
- self.create_project_validator.validate(request_to_validate)
-
- def test_validate_project_request_with_invalid_enabled_fails(self):
- """Exception is raised when `enabled` isn't a boolean-like value."""
- for invalid_enabled in _INVALID_ENABLED_FORMATS:
- request_to_validate = {'name': self.project_name,
- 'enabled': invalid_enabled}
- self.assertRaises(exception.SchemaValidationError,
- self.create_project_validator.validate,
- request_to_validate)
-
- def test_validate_project_request_with_valid_description(self):
- """Test that we validate `description` in create project requests."""
- request_to_validate = {'name': self.project_name,
- 'description': 'My Project'}
- self.create_project_validator.validate(request_to_validate)
-
- def test_validate_project_request_with_invalid_description_fails(self):
- """Exception is raised when `description` as a non-string value."""
- request_to_validate = {'name': self.project_name,
- 'description': False}
- self.assertRaises(exception.SchemaValidationError,
- self.create_project_validator.validate,
- request_to_validate)
-
- def test_validate_project_request_with_name_too_long(self):
- """Exception is raised when `name` is too long."""
- long_project_name = 'a' * 65
- request_to_validate = {'name': long_project_name}
- self.assertRaises(exception.SchemaValidationError,
- self.create_project_validator.validate,
- request_to_validate)
-
- def test_validate_project_request_with_name_too_short(self):
- """Exception raised when `name` is too short."""
- request_to_validate = {'name': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.create_project_validator.validate,
- request_to_validate)
-
- def test_validate_project_request_with_valid_parent_id(self):
- """Test that we validate `parent_id` in create project requests."""
- # parent_id is nullable
- request_to_validate = {'name': self.project_name,
- 'parent_id': None}
- self.create_project_validator.validate(request_to_validate)
- request_to_validate = {'name': self.project_name,
- 'parent_id': uuid.uuid4().hex}
- self.create_project_validator.validate(request_to_validate)
-
- def test_validate_project_request_with_invalid_parent_id_fails(self):
- """Exception is raised when `parent_id` as a non-id value."""
- request_to_validate = {'name': self.project_name,
- 'parent_id': False}
- self.assertRaises(exception.SchemaValidationError,
- self.create_project_validator.validate,
- request_to_validate)
- request_to_validate = {'name': self.project_name,
- 'parent_id': 'fake project'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_project_validator.validate,
- request_to_validate)
-
- def test_validate_project_update_request(self):
- """Test that we validate a project update request."""
- request_to_validate = {'domain_id': uuid.uuid4().hex}
- self.update_project_validator.validate(request_to_validate)
-
- def test_validate_project_update_request_with_no_parameters_fails(self):
- """Exception is raised when updating project without parameters."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_project_validator.validate,
- request_to_validate)
-
- def test_validate_project_update_request_with_name_too_long_fails(self):
- """Exception raised when updating a project with `name` too long."""
- long_project_name = 'a' * 65
- request_to_validate = {'name': long_project_name}
- self.assertRaises(exception.SchemaValidationError,
- self.update_project_validator.validate,
- request_to_validate)
-
- def test_validate_project_update_request_with_name_too_short_fails(self):
- """Exception raised when updating a project with `name` too short."""
- request_to_validate = {'name': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.update_project_validator.validate,
- request_to_validate)
-
- def test_validate_project_create_request_with_valid_domain_id(self):
- """Test that we validate `domain_id` in create project requests."""
- # domain_id is nullable
- for domain_id in [None, uuid.uuid4().hex]:
- request_to_validate = {'name': self.project_name,
- 'domain_id': domain_id}
- self.create_project_validator.validate(request_to_validate)
-
- def test_validate_project_request_with_invalid_domain_id_fails(self):
- """Exception is raised when `domain_id` is a non-id value."""
- for domain_id in [False, 'fake_project']:
- request_to_validate = {'name': self.project_name,
- 'domain_id': domain_id}
- self.assertRaises(exception.SchemaValidationError,
- self.create_project_validator.validate,
- request_to_validate)
-
-
-class DomainValidationTestCase(unit.BaseTestCase):
- """Test for V3 Domain API validation."""
-
- def setUp(self):
- super(DomainValidationTestCase, self).setUp()
-
- self.domain_name = 'My Domain'
-
- create = resource_schema.domain_create
- update = resource_schema.domain_update
- self.create_domain_validator = validators.SchemaValidator(create)
- self.update_domain_validator = validators.SchemaValidator(update)
-
- def test_validate_domain_request(self):
- """Make sure we successfully validate a create domain request."""
- request_to_validate = {'name': self.domain_name}
- self.create_domain_validator.validate(request_to_validate)
-
- def test_validate_domain_request_without_name_fails(self):
- """Make sure we raise an exception when `name` isn't included."""
- request_to_validate = {'enabled': True}
- self.assertRaises(exception.SchemaValidationError,
- self.create_domain_validator.validate,
- request_to_validate)
-
- def test_validate_domain_request_with_enabled(self):
- """Validate `enabled` as boolean-like values for domains."""
- for valid_enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {'name': self.domain_name,
- 'enabled': valid_enabled}
- self.create_domain_validator.validate(request_to_validate)
-
- def test_validate_domain_request_with_invalid_enabled_fails(self):
- """Exception is raised when `enabled` isn't a boolean-like value."""
- for invalid_enabled in _INVALID_ENABLED_FORMATS:
- request_to_validate = {'name': self.domain_name,
- 'enabled': invalid_enabled}
- self.assertRaises(exception.SchemaValidationError,
- self.create_domain_validator.validate,
- request_to_validate)
-
- def test_validate_domain_request_with_valid_description(self):
- """Test that we validate `description` in create domain requests."""
- request_to_validate = {'name': self.domain_name,
- 'description': 'My Domain'}
- self.create_domain_validator.validate(request_to_validate)
-
- def test_validate_domain_request_with_invalid_description_fails(self):
- """Exception is raised when `description` is a non-string value."""
- request_to_validate = {'name': self.domain_name,
- 'description': False}
- self.assertRaises(exception.SchemaValidationError,
- self.create_domain_validator.validate,
- request_to_validate)
-
- def test_validate_domain_request_with_name_too_long(self):
- """Exception is raised when `name` is too long."""
- long_domain_name = 'a' * 65
- request_to_validate = {'name': long_domain_name}
- self.assertRaises(exception.SchemaValidationError,
- self.create_domain_validator.validate,
- request_to_validate)
-
- def test_validate_domain_request_with_name_too_short(self):
- """Exception raised when `name` is too short."""
- request_to_validate = {'name': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.create_domain_validator.validate,
- request_to_validate)
-
- def test_validate_domain_update_request(self):
- """Test that we validate a domain update request."""
- request_to_validate = {'domain_id': uuid.uuid4().hex}
- self.update_domain_validator.validate(request_to_validate)
-
- def test_validate_domain_update_request_with_no_parameters_fails(self):
- """Exception is raised when updating a domain without parameters."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_domain_validator.validate,
- request_to_validate)
-
- def test_validate_domain_update_request_with_name_too_long_fails(self):
- """Exception raised when updating a domain with `name` too long."""
- long_domain_name = 'a' * 65
- request_to_validate = {'name': long_domain_name}
- self.assertRaises(exception.SchemaValidationError,
- self.update_domain_validator.validate,
- request_to_validate)
-
- def test_validate_domain_update_request_with_name_too_short_fails(self):
- """Exception raised when updating a domain with `name` too short."""
- request_to_validate = {'name': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.update_domain_validator.validate,
- request_to_validate)
-
-
-class RoleValidationTestCase(unit.BaseTestCase):
- """Test for V3 Role API validation."""
-
- def setUp(self):
- super(RoleValidationTestCase, self).setUp()
-
- self.role_name = 'My Role'
-
- create = assignment_schema.role_create
- update = assignment_schema.role_update
- self.create_role_validator = validators.SchemaValidator(create)
- self.update_role_validator = validators.SchemaValidator(update)
-
- def test_validate_role_request(self):
- """Test we can successfully validate a create role request."""
- request_to_validate = {'name': self.role_name}
- self.create_role_validator.validate(request_to_validate)
-
- def test_validate_role_create_without_name_raises_exception(self):
- """Test that we raise an exception when `name` isn't included."""
- request_to_validate = {'enabled': True}
- self.assertRaises(exception.SchemaValidationError,
- self.create_role_validator.validate,
- request_to_validate)
-
- def test_validate_role_create_when_name_is_not_string_fails(self):
- """Exception is raised on role create with a non-string `name`."""
- request_to_validate = {'name': True}
- self.assertRaises(exception.SchemaValidationError,
- self.create_role_validator.validate,
- request_to_validate)
- request_to_validate = {'name': 24}
- self.assertRaises(exception.SchemaValidationError,
- self.create_role_validator.validate,
- request_to_validate)
-
- def test_validate_role_update_request(self):
- """Test that we validate a role update request."""
- request_to_validate = {'name': 'My New Role'}
- self.update_role_validator.validate(request_to_validate)
-
- def test_validate_role_update_fails_with_invalid_name_fails(self):
- """Exception when validating an update request with invalid `name`."""
- request_to_validate = {'name': True}
- self.assertRaises(exception.SchemaValidationError,
- self.update_role_validator.validate,
- request_to_validate)
-
- request_to_validate = {'name': 24}
- self.assertRaises(exception.SchemaValidationError,
- self.update_role_validator.validate,
- request_to_validate)
-
-
-class PolicyValidationTestCase(unit.BaseTestCase):
- """Test for V3 Policy API validation."""
-
- def setUp(self):
- super(PolicyValidationTestCase, self).setUp()
-
- create = policy_schema.policy_create
- update = policy_schema.policy_update
- self.create_policy_validator = validators.SchemaValidator(create)
- self.update_policy_validator = validators.SchemaValidator(update)
-
- def test_validate_policy_succeeds(self):
- """Test that we validate a create policy request."""
- request_to_validate = {'blob': 'some blob information',
- 'type': 'application/json'}
- self.create_policy_validator.validate(request_to_validate)
-
- def test_validate_policy_without_blob_fails(self):
- """Exception raised without `blob` in request."""
- request_to_validate = {'type': 'application/json'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_policy_validator.validate,
- request_to_validate)
-
- def test_validate_policy_without_type_fails(self):
- """Exception raised without `type` in request."""
- request_to_validate = {'blob': 'some blob information'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_policy_validator.validate,
- request_to_validate)
-
- def test_validate_policy_create_with_extra_parameters_succeeds(self):
- """Validate policy create with extra parameters."""
- request_to_validate = {'blob': 'some blob information',
- 'type': 'application/json',
- 'extra': 'some extra stuff'}
- self.create_policy_validator.validate(request_to_validate)
-
- def test_validate_policy_create_with_invalid_type_fails(self):
- """Exception raised when `blob` and `type` are boolean."""
- for prop in ['blob', 'type']:
- request_to_validate = {prop: False}
- self.assertRaises(exception.SchemaValidationError,
- self.create_policy_validator.validate,
- request_to_validate)
-
- def test_validate_policy_update_without_parameters_fails(self):
- """Exception raised when updating policy without parameters."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_policy_validator.validate,
- request_to_validate)
-
- def test_validate_policy_update_with_extra_parameters_succeeds(self):
- """Validate policy update request with extra parameters."""
- request_to_validate = {'blob': 'some blob information',
- 'type': 'application/json',
- 'extra': 'some extra stuff'}
- self.update_policy_validator.validate(request_to_validate)
-
- def test_validate_policy_update_succeeds(self):
- """Test that we validate a policy update request."""
- request_to_validate = {'blob': 'some blob information',
- 'type': 'application/json'}
- self.update_policy_validator.validate(request_to_validate)
-
- def test_validate_policy_update_with_invalid_type_fails(self):
- """Exception raised when invalid `type` on policy update."""
- for prop in ['blob', 'type']:
- request_to_validate = {prop: False}
- self.assertRaises(exception.SchemaValidationError,
- self.update_policy_validator.validate,
- request_to_validate)
-
-
-class CredentialValidationTestCase(unit.BaseTestCase):
- """Test for V3 Credential API validation."""
-
- def setUp(self):
- super(CredentialValidationTestCase, self).setUp()
-
- create = credential_schema.credential_create
- update = credential_schema.credential_update
- self.create_credential_validator = validators.SchemaValidator(create)
- self.update_credential_validator = validators.SchemaValidator(update)
-
- def test_validate_credential_succeeds(self):
- """Test that we validate a credential request."""
- request_to_validate = {'blob': 'some string',
- 'project_id': uuid.uuid4().hex,
- 'type': 'ec2',
- 'user_id': uuid.uuid4().hex}
- self.create_credential_validator.validate(request_to_validate)
-
- def test_validate_credential_without_blob_fails(self):
- """Exception raised without `blob` in create request."""
- request_to_validate = {'type': 'ec2',
- 'user_id': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.create_credential_validator.validate,
- request_to_validate)
-
- def test_validate_credential_without_user_id_fails(self):
- """Exception raised without `user_id` in create request."""
- request_to_validate = {'blob': 'some credential blob',
- 'type': 'ec2'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_credential_validator.validate,
- request_to_validate)
-
- def test_validate_credential_without_type_fails(self):
- """Exception raised without `type` in create request."""
- request_to_validate = {'blob': 'some credential blob',
- 'user_id': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.create_credential_validator.validate,
- request_to_validate)
-
- def test_validate_credential_ec2_without_project_id_fails(self):
- """Validate `project_id` is required for ec2.
-
- Test that a SchemaValidationError is raised when type is ec2
- and no `project_id` is provided in create request.
- """
- request_to_validate = {'blob': 'some credential blob',
- 'type': 'ec2',
- 'user_id': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.create_credential_validator.validate,
- request_to_validate)
-
- def test_validate_credential_with_project_id_succeeds(self):
- """Test that credential request works for all types."""
- cred_types = ['ec2', 'cert', uuid.uuid4().hex]
-
- for c_type in cred_types:
- request_to_validate = {'blob': 'some blob',
- 'project_id': uuid.uuid4().hex,
- 'type': c_type,
- 'user_id': uuid.uuid4().hex}
- # Make sure an exception isn't raised
- self.create_credential_validator.validate(request_to_validate)
-
- def test_validate_credential_non_ec2_without_project_id_succeeds(self):
- """Validate `project_id` is not required for non-ec2.
-
- Test that create request without `project_id` succeeds for any
- non-ec2 credential.
- """
- cred_types = ['cert', uuid.uuid4().hex]
-
- for c_type in cred_types:
- request_to_validate = {'blob': 'some blob',
- 'type': c_type,
- 'user_id': uuid.uuid4().hex}
- # Make sure an exception isn't raised
- self.create_credential_validator.validate(request_to_validate)
-
- def test_validate_credential_with_extra_parameters_succeeds(self):
- """Validate create request with extra parameters."""
- request_to_validate = {'blob': 'some string',
- 'extra': False,
- 'project_id': uuid.uuid4().hex,
- 'type': 'ec2',
- 'user_id': uuid.uuid4().hex}
- self.create_credential_validator.validate(request_to_validate)
-
- def test_validate_credential_update_succeeds(self):
- """Test that a credential request is properly validated."""
- request_to_validate = {'blob': 'some string',
- 'project_id': uuid.uuid4().hex,
- 'type': 'ec2',
- 'user_id': uuid.uuid4().hex}
- self.update_credential_validator.validate(request_to_validate)
-
- def test_validate_credential_update_without_parameters_fails(self):
- """Exception is raised on update without parameters."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_credential_validator.validate,
- request_to_validate)
-
- def test_validate_credential_update_with_extra_parameters_succeeds(self):
- """Validate credential update with extra parameters."""
- request_to_validate = {'blob': 'some string',
- 'extra': False,
- 'project_id': uuid.uuid4().hex,
- 'type': 'ec2',
- 'user_id': uuid.uuid4().hex}
- self.update_credential_validator.validate(request_to_validate)
-
-
-class RegionValidationTestCase(unit.BaseTestCase):
- """Test for V3 Region API validation."""
-
- def setUp(self):
- super(RegionValidationTestCase, self).setUp()
-
- self.region_name = 'My Region'
-
- create = catalog_schema.region_create
- update = catalog_schema.region_update
- self.create_region_validator = validators.SchemaValidator(create)
- self.update_region_validator = validators.SchemaValidator(update)
-
- def test_validate_region_request(self):
- """Test that we validate a basic region request."""
- # Create_region doesn't take any parameters in the request so let's
- # make sure we cover that case.
- request_to_validate = {}
- self.create_region_validator.validate(request_to_validate)
-
- def test_validate_region_create_request_with_parameters(self):
- """Test that we validate a region request with parameters."""
- request_to_validate = {'id': 'us-east',
- 'description': 'US East Region',
- 'parent_region_id': 'US Region'}
- self.create_region_validator.validate(request_to_validate)
-
- def test_validate_region_create_with_uuid(self):
- """Test that we validate a region request with a UUID as the id."""
- request_to_validate = {'id': uuid.uuid4().hex,
- 'description': 'US East Region',
- 'parent_region_id': uuid.uuid4().hex}
- self.create_region_validator.validate(request_to_validate)
-
- def test_validate_region_create_fails_with_invalid_region_id(self):
- """Exception raised when passing invalid `id` in request."""
- request_to_validate = {'id': 1234,
- 'description': 'US East Region'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_region_validator.validate,
- request_to_validate)
-
- def test_validate_region_create_succeeds_with_extra_parameters(self):
- """Validate create region request with extra values."""
- request_to_validate = {'other_attr': uuid.uuid4().hex}
- self.create_region_validator.validate(request_to_validate)
-
- def test_validate_region_create_succeeds_with_no_parameters(self):
- """Validate create region request with no parameters."""
- request_to_validate = {}
- self.create_region_validator.validate(request_to_validate)
-
- def test_validate_region_update_succeeds(self):
- """Test that we validate a region update request."""
- request_to_validate = {'id': 'us-west',
- 'description': 'US West Region',
- 'parent_region_id': 'us-region'}
- self.update_region_validator.validate(request_to_validate)
-
- def test_validate_region_update_succeeds_with_extra_parameters(self):
- """Validate extra attributes in the region update request."""
- request_to_validate = {'other_attr': uuid.uuid4().hex}
- self.update_region_validator.validate(request_to_validate)
-
- def test_validate_region_update_fails_with_no_parameters(self):
- """Exception raised when passing no parameters in a region update."""
- # An update request should consist of at least one value to update
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_region_validator.validate,
- request_to_validate)
-
-
-class ServiceValidationTestCase(unit.BaseTestCase):
- """Test for V3 Service API validation."""
-
- def setUp(self):
- super(ServiceValidationTestCase, self).setUp()
-
- create = catalog_schema.service_create
- update = catalog_schema.service_update
- self.create_service_validator = validators.SchemaValidator(create)
- self.update_service_validator = validators.SchemaValidator(update)
-
- def test_validate_service_create_succeeds(self):
- """Test that we validate a service create request."""
- request_to_validate = {'name': 'Nova',
- 'description': 'OpenStack Compute Service',
- 'enabled': True,
- 'type': 'compute'}
- self.create_service_validator.validate(request_to_validate)
-
- def test_validate_service_create_succeeds_with_required_parameters(self):
- """Validate a service create request with the required parameters."""
- # The only parameter type required for service creation is 'type'
- request_to_validate = {'type': 'compute'}
- self.create_service_validator.validate(request_to_validate)
-
- def test_validate_service_create_fails_without_type(self):
- """Exception raised when trying to create a service without `type`."""
- request_to_validate = {'name': 'Nova'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_create_succeeds_with_extra_parameters(self):
- """Test that extra parameters pass validation on create service."""
- request_to_validate = {'other_attr': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex}
- self.create_service_validator.validate(request_to_validate)
-
- def test_validate_service_create_succeeds_with_valid_enabled(self):
- """Validate boolean values as enabled values on service create."""
- for valid_enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': valid_enabled,
- 'type': uuid.uuid4().hex}
- self.create_service_validator.validate(request_to_validate)
-
- def test_validate_service_create_fails_with_invalid_enabled(self):
- """Exception raised when boolean-like parameters as `enabled`
-
- On service create, make sure an exception is raised if `enabled` is
- not a boolean value.
- """
- for invalid_enabled in _INVALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': invalid_enabled,
- 'type': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.create_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_create_fails_when_name_too_long(self):
- """Exception raised when `name` is greater than 255 characters."""
- long_name = 'a' * 256
- request_to_validate = {'type': 'compute',
- 'name': long_name}
- self.assertRaises(exception.SchemaValidationError,
- self.create_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_create_fails_when_name_too_short(self):
- """Exception is raised when `name` is too short."""
- request_to_validate = {'type': 'compute',
- 'name': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.create_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_create_fails_when_type_too_long(self):
- """Exception is raised when `type` is too long."""
- long_type_name = 'a' * 256
- request_to_validate = {'type': long_type_name}
- self.assertRaises(exception.SchemaValidationError,
- self.create_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_create_fails_when_type_too_short(self):
- """Exception is raised when `type` is too short."""
- request_to_validate = {'type': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.create_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_update_request_succeeds(self):
- """Test that we validate a service update request."""
- request_to_validate = {'name': 'Cinder',
- 'type': 'volume',
- 'description': 'OpenStack Block Storage',
- 'enabled': False}
- self.update_service_validator.validate(request_to_validate)
-
- def test_validate_service_update_fails_with_no_parameters(self):
- """Exception raised when updating a service without values."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_update_succeeds_with_extra_parameters(self):
- """Validate updating a service with extra parameters."""
- request_to_validate = {'other_attr': uuid.uuid4().hex}
- self.update_service_validator.validate(request_to_validate)
-
- def test_validate_service_update_succeeds_with_valid_enabled(self):
- """Validate boolean formats as `enabled` on service update."""
- for valid_enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': valid_enabled}
- self.update_service_validator.validate(request_to_validate)
-
- def test_validate_service_update_fails_with_invalid_enabled(self):
- """Exception raised when boolean-like values as `enabled`."""
- for invalid_enabled in _INVALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': invalid_enabled}
- self.assertRaises(exception.SchemaValidationError,
- self.update_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_update_fails_with_name_too_long(self):
- """Exception is raised when `name` is too long on update."""
- long_name = 'a' * 256
- request_to_validate = {'name': long_name}
- self.assertRaises(exception.SchemaValidationError,
- self.update_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_update_fails_with_name_too_short(self):
- """Exception is raised when `name` is too short on update."""
- request_to_validate = {'name': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.update_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_update_fails_with_type_too_long(self):
- """Exception is raised when `type` is too long on update."""
- long_type_name = 'a' * 256
- request_to_validate = {'type': long_type_name}
- self.assertRaises(exception.SchemaValidationError,
- self.update_service_validator.validate,
- request_to_validate)
-
- def test_validate_service_update_fails_with_type_too_short(self):
- """Exception is raised when `type` is too short on update."""
- request_to_validate = {'type': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.update_service_validator.validate,
- request_to_validate)
-
-
-class EndpointValidationTestCase(unit.BaseTestCase):
- """Test for V3 Endpoint API validation."""
-
- def setUp(self):
- super(EndpointValidationTestCase, self).setUp()
-
- create = catalog_schema.endpoint_create
- update = catalog_schema.endpoint_update
- self.create_endpoint_validator = validators.SchemaValidator(create)
- self.update_endpoint_validator = validators.SchemaValidator(update)
-
- def test_validate_endpoint_request_succeeds(self):
- """Test that we validate an endpoint request."""
- request_to_validate = {'enabled': True,
- 'interface': 'admin',
- 'region_id': uuid.uuid4().hex,
- 'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/'}
- self.create_endpoint_validator.validate(request_to_validate)
-
- def test_validate_endpoint_create_succeeds_with_required_parameters(self):
- """Validate an endpoint request with only the required parameters."""
- # According to the Identity V3 API endpoint creation requires
- # 'service_id', 'interface', and 'url'
- request_to_validate = {'service_id': uuid.uuid4().hex,
- 'interface': 'public',
- 'url': 'https://service.example.com:5000/'}
- self.create_endpoint_validator.validate(request_to_validate)
-
- def test_validate_endpoint_create_succeeds_with_valid_enabled(self):
- """Validate an endpoint with boolean values.
-
- Validate boolean values as `enabled` in endpoint create requests.
- """
- for valid_enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': valid_enabled,
- 'service_id': uuid.uuid4().hex,
- 'interface': 'public',
- 'url': 'https://service.example.com:5000/'}
- self.create_endpoint_validator.validate(request_to_validate)
-
- def test_validate_create_endpoint_fails_with_invalid_enabled(self):
- """Exception raised when boolean-like values as `enabled`."""
- for invalid_enabled in _INVALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': invalid_enabled,
- 'service_id': uuid.uuid4().hex,
- 'interface': 'public',
- 'url': 'https://service.example.com:5000/'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_create_succeeds_with_extra_parameters(self):
- """Test that extra parameters pass validation on create endpoint."""
- request_to_validate = {'other_attr': uuid.uuid4().hex,
- 'service_id': uuid.uuid4().hex,
- 'interface': 'public',
- 'url': 'https://service.example.com:5000/'}
- self.create_endpoint_validator.validate(request_to_validate)
-
- def test_validate_endpoint_create_fails_without_service_id(self):
- """Exception raised when `service_id` isn't in endpoint request."""
- request_to_validate = {'interface': 'public',
- 'url': 'https://service.example.com:5000/'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_create_fails_without_interface(self):
- """Exception raised when `interface` isn't in endpoint request."""
- request_to_validate = {'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_create_fails_without_url(self):
- """Exception raised when `url` isn't in endpoint request."""
- request_to_validate = {'service_id': uuid.uuid4().hex,
- 'interface': 'public'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_create_succeeds_with_url(self):
- """Validate `url` attribute in endpoint create request."""
- request_to_validate = {'service_id': uuid.uuid4().hex,
- 'interface': 'public'}
- for url in _VALID_URLS:
- request_to_validate['url'] = url
- self.create_endpoint_validator.validate(request_to_validate)
-
- def test_validate_endpoint_create_fails_with_invalid_url(self):
- """Exception raised when passing invalid `url` in request."""
- request_to_validate = {'service_id': uuid.uuid4().hex,
- 'interface': 'public'}
- for url in _INVALID_URLS:
- request_to_validate['url'] = url
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_create_fails_with_invalid_interface(self):
- """Exception raised with invalid `interface`."""
- request_to_validate = {'interface': uuid.uuid4().hex,
- 'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_create_fails_with_invalid_region_id(self):
- """Exception raised when passing invalid `region(_id)` in request."""
- request_to_validate = {'interface': 'admin',
- 'region_id': 1234,
- 'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/'}
-
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_validator.validate,
- request_to_validate)
-
- request_to_validate = {'interface': 'admin',
- 'region': 1234,
- 'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/'}
-
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_update_fails_with_invalid_enabled(self):
- """Exception raised when `enabled` is boolean-like value."""
- for invalid_enabled in _INVALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': invalid_enabled}
- self.assertRaises(exception.SchemaValidationError,
- self.update_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_update_succeeds_with_valid_enabled(self):
- """Validate `enabled` as boolean values."""
- for valid_enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': valid_enabled}
- self.update_endpoint_validator.validate(request_to_validate)
-
- def test_validate_endpoint_update_fails_with_invalid_interface(self):
- """Exception raised when invalid `interface` on endpoint update."""
- request_to_validate = {'interface': uuid.uuid4().hex,
- 'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/'}
- self.assertRaises(exception.SchemaValidationError,
- self.update_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_update_request_succeeds(self):
- """Test that we validate an endpoint update request."""
- request_to_validate = {'enabled': True,
- 'interface': 'admin',
- 'region_id': uuid.uuid4().hex,
- 'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/'}
- self.update_endpoint_validator.validate(request_to_validate)
-
- def test_validate_endpoint_update_fails_with_no_parameters(self):
- """Exception raised when no parameters on endpoint update."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_update_succeeds_with_extra_parameters(self):
- """Test that extra parameters pass validation on update endpoint."""
- request_to_validate = {'enabled': True,
- 'interface': 'admin',
- 'region_id': uuid.uuid4().hex,
- 'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/',
- 'other_attr': uuid.uuid4().hex}
- self.update_endpoint_validator.validate(request_to_validate)
-
- def test_validate_endpoint_update_succeeds_with_url(self):
- """Validate `url` attribute in endpoint update request."""
- request_to_validate = {'service_id': uuid.uuid4().hex,
- 'interface': 'public'}
- for url in _VALID_URLS:
- request_to_validate['url'] = url
- self.update_endpoint_validator.validate(request_to_validate)
-
- def test_validate_endpoint_update_fails_with_invalid_url(self):
- """Exception raised when passing invalid `url` in request."""
- request_to_validate = {'service_id': uuid.uuid4().hex,
- 'interface': 'public'}
- for url in _INVALID_URLS:
- request_to_validate['url'] = url
- self.assertRaises(exception.SchemaValidationError,
- self.update_endpoint_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_update_fails_with_invalid_region_id(self):
- """Exception raised when passing invalid `region(_id)` in request."""
- request_to_validate = {'interface': 'admin',
- 'region_id': 1234,
- 'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/'}
-
- self.assertRaises(exception.SchemaValidationError,
- self.update_endpoint_validator.validate,
- request_to_validate)
-
- request_to_validate = {'interface': 'admin',
- 'region': 1234,
- 'service_id': uuid.uuid4().hex,
- 'url': 'https://service.example.com:5000/'}
-
- self.assertRaises(exception.SchemaValidationError,
- self.update_endpoint_validator.validate,
- request_to_validate)
-
-
-class EndpointGroupValidationTestCase(unit.BaseTestCase):
- """Test for V3 Endpoint Group API validation."""
-
- def setUp(self):
- super(EndpointGroupValidationTestCase, self).setUp()
-
- create = catalog_schema.endpoint_group_create
- update = catalog_schema.endpoint_group_update
- self.create_endpoint_grp_validator = validators.SchemaValidator(create)
- self.update_endpoint_grp_validator = validators.SchemaValidator(update)
-
- def test_validate_endpoint_group_request_succeeds(self):
- """Test that we validate an endpoint group request."""
- request_to_validate = {'description': 'endpoint group description',
- 'filters': {'interface': 'admin'},
- 'name': 'endpoint_group_name'}
- self.create_endpoint_grp_validator.validate(request_to_validate)
-
- def test_validate_endpoint_group_create_succeeds_with_req_parameters(self):
- """Validate required endpoint group parameters.
-
- This test ensure that validation succeeds with only the required
- parameters passed for creating an endpoint group.
- """
- request_to_validate = {'filters': {'interface': 'admin'},
- 'name': 'endpoint_group_name'}
- self.create_endpoint_grp_validator.validate(request_to_validate)
-
- def test_validate_endpoint_group_create_succeeds_with_valid_filters(self):
- """Validate `filters` in endpoint group create requests."""
- request_to_validate = {'description': 'endpoint group description',
- 'name': 'endpoint_group_name'}
- for valid_filters in _VALID_FILTERS:
- request_to_validate['filters'] = valid_filters
- self.create_endpoint_grp_validator.validate(request_to_validate)
-
- def test_validate_create_endpoint_group_fails_with_invalid_filters(self):
- """Validate invalid `filters` value in endpoint group parameters.
-
- This test ensures that exception is raised when non-dict values is
- used as `filters` in endpoint group create request.
- """
- request_to_validate = {'description': 'endpoint group description',
- 'name': 'endpoint_group_name'}
- for invalid_filters in _INVALID_FILTERS:
- request_to_validate['filters'] = invalid_filters
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_grp_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_group_create_fails_without_name(self):
- """Exception raised when `name` isn't in endpoint group request."""
- request_to_validate = {'description': 'endpoint group description',
- 'filters': {'interface': 'admin'}}
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_grp_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_group_create_fails_without_filters(self):
- """Exception raised when `filters` isn't in endpoint group request."""
- request_to_validate = {'description': 'endpoint group description',
- 'name': 'endpoint_group_name'}
- self.assertRaises(exception.SchemaValidationError,
- self.create_endpoint_grp_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_group_update_request_succeeds(self):
- """Test that we validate an endpoint group update request."""
- request_to_validate = {'description': 'endpoint group description',
- 'filters': {'interface': 'admin'},
- 'name': 'endpoint_group_name'}
- self.update_endpoint_grp_validator.validate(request_to_validate)
-
- def test_validate_endpoint_group_update_fails_with_no_parameters(self):
- """Exception raised when no parameters on endpoint group update."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_endpoint_grp_validator.validate,
- request_to_validate)
-
- def test_validate_endpoint_group_update_succeeds_with_name(self):
- """Validate request with only `name` in endpoint group update.
-
- This test ensures that passing only a `name` passes validation
- on update endpoint group request.
- """
- request_to_validate = {'name': 'endpoint_group_name'}
- self.update_endpoint_grp_validator.validate(request_to_validate)
-
- def test_validate_endpoint_group_update_succeeds_with_valid_filters(self):
- """Validate `filters` as dict values."""
- for valid_filters in _VALID_FILTERS:
- request_to_validate = {'filters': valid_filters}
- self.update_endpoint_grp_validator.validate(request_to_validate)
-
- def test_validate_endpoint_group_update_fails_with_invalid_filters(self):
- """Exception raised when passing invalid `filters` in request."""
- for invalid_filters in _INVALID_FILTERS:
- request_to_validate = {'filters': invalid_filters}
- self.assertRaises(exception.SchemaValidationError,
- self.update_endpoint_grp_validator.validate,
- request_to_validate)
-
-
-class TrustValidationTestCase(unit.BaseTestCase):
- """Test for V3 Trust API validation."""
-
- _valid_roles = ['member', uuid.uuid4().hex, str(uuid.uuid4())]
- _invalid_roles = [False, True, 123, None]
-
- def setUp(self):
- super(TrustValidationTestCase, self).setUp()
-
- create = trust_schema.trust_create
- self.create_trust_validator = validators.SchemaValidator(create)
-
- def test_validate_trust_succeeds(self):
- """Test that we can validate a trust request."""
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False}
- self.create_trust_validator.validate(request_to_validate)
-
- def test_validate_trust_with_all_parameters_succeeds(self):
- """Test that we can validate a trust request with all parameters."""
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False,
- 'project_id': uuid.uuid4().hex,
- 'roles': [uuid.uuid4().hex, uuid.uuid4().hex],
- 'expires_at': 'some timestamp',
- 'remaining_uses': 2}
- self.create_trust_validator.validate(request_to_validate)
-
- def test_validate_trust_without_trustor_id_fails(self):
- """Validate trust request fails without `trustor_id`."""
- request_to_validate = {'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False}
- self.assertRaises(exception.SchemaValidationError,
- self.create_trust_validator.validate,
- request_to_validate)
-
- def test_validate_trust_without_trustee_id_fails(self):
- """Validate trust request fails without `trustee_id`."""
- request_to_validate = {'trusor_user_id': uuid.uuid4().hex,
- 'impersonation': False}
- self.assertRaises(exception.SchemaValidationError,
- self.create_trust_validator.validate,
- request_to_validate)
-
- def test_validate_trust_without_impersonation_fails(self):
- """Validate trust request fails without `impersonation`."""
- request_to_validate = {'trustee_user_id': uuid.uuid4().hex,
- 'trustor_user_id': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.create_trust_validator.validate,
- request_to_validate)
-
- def test_validate_trust_with_extra_parameters_succeeds(self):
- """Test that we can validate a trust request with extra parameters."""
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False,
- 'project_id': uuid.uuid4().hex,
- 'roles': [uuid.uuid4().hex, uuid.uuid4().hex],
- 'expires_at': 'some timestamp',
- 'remaining_uses': 2,
- 'extra': 'something extra!'}
- self.create_trust_validator.validate(request_to_validate)
-
- def test_validate_trust_with_invalid_impersonation_fails(self):
- """Validate trust request with invalid `impersonation` fails."""
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': 2}
- self.assertRaises(exception.SchemaValidationError,
- self.create_trust_validator.validate,
- request_to_validate)
-
- def test_validate_trust_with_null_remaining_uses_succeeds(self):
- """Validate trust request with null `remaining_uses`."""
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False,
- 'remaining_uses': None}
- self.create_trust_validator.validate(request_to_validate)
-
- def test_validate_trust_with_remaining_uses_succeeds(self):
- """Validate trust request with `remaining_uses` succeeds."""
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False,
- 'remaining_uses': 2}
- self.create_trust_validator.validate(request_to_validate)
-
- def test_validate_trust_with_period_in_user_id_string(self):
- """Validate trust request with a period in the user id string."""
- request_to_validate = {'trustor_user_id': 'john.smith',
- 'trustee_user_id': 'joe.developer',
- 'impersonation': False}
- self.create_trust_validator.validate(request_to_validate)
-
- def test_validate_trust_with_invalid_expires_at_fails(self):
- """Validate trust request with invalid `expires_at` fails."""
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False,
- 'expires_at': 3}
- self.assertRaises(exception.SchemaValidationError,
- self.create_trust_validator.validate,
- request_to_validate)
-
- def test_validate_trust_with_role_types_succeeds(self):
- """Validate trust request with `roles` succeeds."""
- for role in self._valid_roles:
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False,
- 'roles': [role]}
- self.create_trust_validator.validate(request_to_validate)
-
- def test_validate_trust_with_invalid_role_type_fails(self):
- """Validate trust request with invalid `roles` fails."""
- for role in self._invalid_roles:
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False,
- 'roles': role}
- self.assertRaises(exception.SchemaValidationError,
- self.create_trust_validator.validate,
- request_to_validate)
-
- def test_validate_trust_with_list_of_valid_roles_succeeds(self):
- """Validate trust request with a list of valid `roles`."""
- request_to_validate = {'trustor_user_id': uuid.uuid4().hex,
- 'trustee_user_id': uuid.uuid4().hex,
- 'impersonation': False,
- 'roles': self._valid_roles}
- self.create_trust_validator.validate(request_to_validate)
-
-
-class ServiceProviderValidationTestCase(unit.BaseTestCase):
- """Test for V3 Service Provider API validation."""
-
- def setUp(self):
- super(ServiceProviderValidationTestCase, self).setUp()
-
- self.valid_auth_url = 'https://' + uuid.uuid4().hex + '.com'
- self.valid_sp_url = 'https://' + uuid.uuid4().hex + '.com'
-
- create = federation_schema.service_provider_create
- update = federation_schema.service_provider_update
- self.create_sp_validator = validators.SchemaValidator(create)
- self.update_sp_validator = validators.SchemaValidator(update)
-
- def test_validate_sp_request(self):
- """Test that we validate `auth_url` and `sp_url` in request."""
- request_to_validate = {
- 'auth_url': self.valid_auth_url,
- 'sp_url': self.valid_sp_url
- }
- self.create_sp_validator.validate(request_to_validate)
-
- def test_validate_sp_request_with_invalid_auth_url_fails(self):
- """Validate request fails with invalid `auth_url`."""
- request_to_validate = {
- 'auth_url': uuid.uuid4().hex,
- 'sp_url': self.valid_sp_url
- }
- self.assertRaises(exception.SchemaValidationError,
- self.create_sp_validator.validate,
- request_to_validate)
-
- def test_validate_sp_request_with_invalid_sp_url_fails(self):
- """Validate request fails with invalid `sp_url`."""
- request_to_validate = {
- 'auth_url': self.valid_auth_url,
- 'sp_url': uuid.uuid4().hex,
- }
- self.assertRaises(exception.SchemaValidationError,
- self.create_sp_validator.validate,
- request_to_validate)
-
- def test_validate_sp_request_without_auth_url_fails(self):
- """Validate request fails without `auth_url`."""
- request_to_validate = {
- 'sp_url': self.valid_sp_url
- }
- self.assertRaises(exception.SchemaValidationError,
- self.create_sp_validator.validate,
- request_to_validate)
- request_to_validate = {
- 'auth_url': None,
- 'sp_url': self.valid_sp_url
- }
- self.assertRaises(exception.SchemaValidationError,
- self.create_sp_validator.validate,
- request_to_validate)
-
- def test_validate_sp_request_without_sp_url_fails(self):
- """Validate request fails without `sp_url`."""
- request_to_validate = {
- 'auth_url': self.valid_auth_url,
- }
- self.assertRaises(exception.SchemaValidationError,
- self.create_sp_validator.validate,
- request_to_validate)
- request_to_validate = {
- 'auth_url': self.valid_auth_url,
- 'sp_url': None,
- }
- self.assertRaises(exception.SchemaValidationError,
- self.create_sp_validator.validate,
- request_to_validate)
-
- def test_validate_sp_request_with_enabled(self):
- """Validate `enabled` as boolean-like values."""
- for valid_enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {
- 'auth_url': self.valid_auth_url,
- 'sp_url': self.valid_sp_url,
- 'enabled': valid_enabled
- }
- self.create_sp_validator.validate(request_to_validate)
-
- def test_validate_sp_request_with_invalid_enabled_fails(self):
- """Exception is raised when `enabled` isn't a boolean-like value."""
- for invalid_enabled in _INVALID_ENABLED_FORMATS:
- request_to_validate = {
- 'auth_url': self.valid_auth_url,
- 'sp_url': self.valid_sp_url,
- 'enabled': invalid_enabled
- }
- self.assertRaises(exception.SchemaValidationError,
- self.create_sp_validator.validate,
- request_to_validate)
-
- def test_validate_sp_request_with_valid_description(self):
- """Test that we validate `description` in create requests."""
- request_to_validate = {
- 'auth_url': self.valid_auth_url,
- 'sp_url': self.valid_sp_url,
- 'description': 'My Service Provider'
- }
- self.create_sp_validator.validate(request_to_validate)
-
- def test_validate_sp_request_with_invalid_description_fails(self):
- """Exception is raised when `description` as a non-string value."""
- request_to_validate = {
- 'auth_url': self.valid_auth_url,
- 'sp_url': self.valid_sp_url,
- 'description': False
- }
- self.assertRaises(exception.SchemaValidationError,
- self.create_sp_validator.validate,
- request_to_validate)
-
- def test_validate_sp_request_with_extra_field_fails(self):
- """Exception raised when passing extra fields in the body."""
- # 'id' can't be passed in the body since it is passed in the URL
- request_to_validate = {
- 'id': 'ACME',
- 'auth_url': self.valid_auth_url,
- 'sp_url': self.valid_sp_url,
- 'description': 'My Service Provider'
- }
- self.assertRaises(exception.SchemaValidationError,
- self.create_sp_validator.validate,
- request_to_validate)
-
- def test_validate_sp_update_request(self):
- """Test that we validate a update request."""
- request_to_validate = {'description': uuid.uuid4().hex}
- self.update_sp_validator.validate(request_to_validate)
-
- def test_validate_sp_update_request_with_no_parameters_fails(self):
- """Exception is raised when updating without parameters."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_sp_validator.validate,
- request_to_validate)
-
- def test_validate_sp_update_request_with_invalid_auth_url_fails(self):
- """Exception raised when updating with invalid `auth_url`."""
- request_to_validate = {'auth_url': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.update_sp_validator.validate,
- request_to_validate)
- request_to_validate = {'auth_url': None}
- self.assertRaises(exception.SchemaValidationError,
- self.update_sp_validator.validate,
- request_to_validate)
-
- def test_validate_sp_update_request_with_invalid_sp_url_fails(self):
- """Exception raised when updating with invalid `sp_url`."""
- request_to_validate = {'sp_url': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.update_sp_validator.validate,
- request_to_validate)
- request_to_validate = {'sp_url': None}
- self.assertRaises(exception.SchemaValidationError,
- self.update_sp_validator.validate,
- request_to_validate)
-
-
-class UserValidationTestCase(unit.BaseTestCase):
- """Test for V3 User API validation."""
-
- def setUp(self):
- super(UserValidationTestCase, self).setUp()
-
- self.user_name = uuid.uuid4().hex
-
- create = identity_schema.user_create
- update = identity_schema.user_update
- self.create_user_validator = validators.SchemaValidator(create)
- self.update_user_validator = validators.SchemaValidator(update)
-
- def test_validate_user_create_request_succeeds(self):
- """Test that validating a user create request succeeds."""
- request_to_validate = {'name': self.user_name}
- self.create_user_validator.validate(request_to_validate)
-
- def test_validate_user_create_with_all_valid_parameters_succeeds(self):
- """Test that validating a user create request succeeds."""
- request_to_validate = unit.new_user_ref(domain_id=uuid.uuid4().hex,
- name=self.user_name)
- self.create_user_validator.validate(request_to_validate)
-
- def test_validate_user_create_fails_without_name(self):
- """Exception raised when validating a user without name."""
- request_to_validate = {'email': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.create_user_validator.validate,
- request_to_validate)
-
- def test_validate_user_create_fails_with_name_of_zero_length(self):
- """Exception raised when validating a username with length of zero."""
- request_to_validate = {'name': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.create_user_validator.validate,
- request_to_validate)
-
- def test_validate_user_create_fails_with_name_of_wrong_type(self):
- """Exception raised when validating a username of wrong type."""
- request_to_validate = {'name': True}
- self.assertRaises(exception.SchemaValidationError,
- self.create_user_validator.validate,
- request_to_validate)
-
- def test_validate_user_create_succeeds_with_valid_enabled_formats(self):
- """Validate acceptable enabled formats in create user requests."""
- for enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {'name': self.user_name,
- 'enabled': enabled}
- self.create_user_validator.validate(request_to_validate)
-
- def test_validate_user_create_fails_with_invalid_enabled_formats(self):
- """Exception raised when enabled is not an acceptable format."""
- for invalid_enabled in _INVALID_ENABLED_FORMATS:
- request_to_validate = {'name': self.user_name,
- 'enabled': invalid_enabled}
- self.assertRaises(exception.SchemaValidationError,
- self.create_user_validator.validate,
- request_to_validate)
-
- def test_validate_user_create_succeeds_with_extra_attributes(self):
- """Validate extra parameters on user create requests."""
- request_to_validate = {'name': self.user_name,
- 'other_attr': uuid.uuid4().hex}
- self.create_user_validator.validate(request_to_validate)
-
- def test_validate_user_create_succeeds_with_password_of_zero_length(self):
- """Validate empty password on user create requests."""
- request_to_validate = {'name': self.user_name,
- 'password': ''}
- self.create_user_validator.validate(request_to_validate)
-
- def test_validate_user_create_succeeds_with_null_password(self):
- """Validate that password is nullable on create user."""
- request_to_validate = {'name': self.user_name,
- 'password': None}
- self.create_user_validator.validate(request_to_validate)
-
- def test_validate_user_create_fails_with_invalid_password_type(self):
- """Exception raised when user password is of the wrong type."""
- request_to_validate = {'name': self.user_name,
- 'password': True}
- self.assertRaises(exception.SchemaValidationError,
- self.create_user_validator.validate,
- request_to_validate)
-
- def test_validate_user_create_succeeds_with_null_description(self):
- """Validate that description can be nullable on create user."""
- request_to_validate = {'name': self.user_name,
- 'description': None}
- self.create_user_validator.validate(request_to_validate)
-
- def test_validate_user_update_succeeds(self):
- """Validate an update user request."""
- request_to_validate = {'email': uuid.uuid4().hex}
- self.update_user_validator.validate(request_to_validate)
-
- def test_validate_user_update_fails_with_no_parameters(self):
- """Exception raised when updating nothing."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_user_validator.validate,
- request_to_validate)
-
- def test_validate_user_update_succeeds_with_extra_parameters(self):
- """Validate user update requests with extra parameters."""
- request_to_validate = {'other_attr': uuid.uuid4().hex}
- self.update_user_validator.validate(request_to_validate)
-
-
-class GroupValidationTestCase(unit.BaseTestCase):
- """Test for V3 Group API validation."""
-
- def setUp(self):
- super(GroupValidationTestCase, self).setUp()
-
- self.group_name = uuid.uuid4().hex
-
- create = identity_schema.group_create
- update = identity_schema.group_update
- self.create_group_validator = validators.SchemaValidator(create)
- self.update_group_validator = validators.SchemaValidator(update)
-
- def test_validate_group_create_succeeds(self):
- """Validate create group requests."""
- request_to_validate = {'name': self.group_name}
- self.create_group_validator.validate(request_to_validate)
-
- def test_validate_group_create_succeeds_with_all_parameters(self):
- """Validate create group requests with all parameters."""
- request_to_validate = {'name': self.group_name,
- 'description': uuid.uuid4().hex,
- 'domain_id': uuid.uuid4().hex}
- self.create_group_validator.validate(request_to_validate)
-
- def test_validate_group_create_fails_without_group_name(self):
- """Exception raised when group name is not provided in request."""
- request_to_validate = {'description': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.create_group_validator.validate,
- request_to_validate)
-
- def test_validate_group_create_fails_when_group_name_is_too_short(self):
- """Exception raised when group name is equal to zero."""
- request_to_validate = {'name': ''}
- self.assertRaises(exception.SchemaValidationError,
- self.create_group_validator.validate,
- request_to_validate)
-
- def test_validate_group_create_succeeds_with_extra_parameters(self):
- """Validate extra attributes on group create requests."""
- request_to_validate = {'name': self.group_name,
- 'other_attr': uuid.uuid4().hex}
- self.create_group_validator.validate(request_to_validate)
-
- def test_validate_group_update_succeeds(self):
- """Validate group update requests."""
- request_to_validate = {'description': uuid.uuid4().hex}
- self.update_group_validator.validate(request_to_validate)
-
- def test_validate_group_update_fails_with_no_parameters(self):
- """Exception raised when no parameters passed in on update."""
- request_to_validate = {}
- self.assertRaises(exception.SchemaValidationError,
- self.update_group_validator.validate,
- request_to_validate)
-
- def test_validate_group_update_succeeds_with_extra_parameters(self):
- """Validate group update requests with extra parameters."""
- request_to_validate = {'other_attr': uuid.uuid4().hex}
- self.update_group_validator.validate(request_to_validate)
-
-
-class IdentityProviderValidationTestCase(unit.BaseTestCase):
- """Test for V3 Identity Provider API validation."""
-
- def setUp(self):
- super(IdentityProviderValidationTestCase, self).setUp()
-
- create = federation_schema.identity_provider_create
- update = federation_schema.identity_provider_update
- self.create_idp_validator = validators.SchemaValidator(create)
- self.update_idp_validator = validators.SchemaValidator(update)
-
- def test_validate_idp_request_succeeds(self):
- """Test that we validate an identity provider request."""
- request_to_validate = {'description': 'identity provider description',
- 'enabled': True,
- 'remote_ids': [uuid.uuid4().hex,
- uuid.uuid4().hex]}
- self.create_idp_validator.validate(request_to_validate)
- self.update_idp_validator.validate(request_to_validate)
-
- def test_validate_idp_request_fails_with_invalid_params(self):
- """Exception raised when unknown parameter is found."""
- request_to_validate = {'bogus': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.create_idp_validator.validate,
- request_to_validate)
-
- self.assertRaises(exception.SchemaValidationError,
- self.update_idp_validator.validate,
- request_to_validate)
-
- def test_validate_idp_request_with_enabled(self):
- """Validate `enabled` as boolean-like values."""
- for valid_enabled in _VALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': valid_enabled}
- self.create_idp_validator.validate(request_to_validate)
- self.update_idp_validator.validate(request_to_validate)
-
- def test_validate_idp_request_with_invalid_enabled_fails(self):
- """Exception is raised when `enabled` isn't a boolean-like value."""
- for invalid_enabled in _INVALID_ENABLED_FORMATS:
- request_to_validate = {'enabled': invalid_enabled}
- self.assertRaises(exception.SchemaValidationError,
- self.create_idp_validator.validate,
- request_to_validate)
-
- self.assertRaises(exception.SchemaValidationError,
- self.update_idp_validator.validate,
- request_to_validate)
-
- def test_validate_idp_request_no_parameters(self):
- """Test that schema validation with empty request body."""
- request_to_validate = {}
- self.create_idp_validator.validate(request_to_validate)
-
- # Exception raised when no property on IdP update.
- self.assertRaises(exception.SchemaValidationError,
- self.update_idp_validator.validate,
- request_to_validate)
-
- def test_validate_idp_request_with_invalid_description_fails(self):
- """Exception is raised when `description` as a non-string value."""
- request_to_validate = {'description': False}
- self.assertRaises(exception.SchemaValidationError,
- self.create_idp_validator.validate,
- request_to_validate)
-
- self.assertRaises(exception.SchemaValidationError,
- self.update_idp_validator.validate,
- request_to_validate)
-
- def test_validate_idp_request_with_invalid_remote_id_fails(self):
- """Exception is raised when `remote_ids` is not a array."""
- request_to_validate = {"remote_ids": uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.create_idp_validator.validate,
- request_to_validate)
-
- self.assertRaises(exception.SchemaValidationError,
- self.update_idp_validator.validate,
- request_to_validate)
-
- def test_validate_idp_request_with_duplicated_remote_id(self):
- """Exception is raised when the duplicated `remote_ids` is found."""
- idp_id = uuid.uuid4().hex
- request_to_validate = {"remote_ids": [idp_id, idp_id]}
- self.assertRaises(exception.SchemaValidationError,
- self.create_idp_validator.validate,
- request_to_validate)
-
- self.assertRaises(exception.SchemaValidationError,
- self.update_idp_validator.validate,
- request_to_validate)
-
- def test_validate_idp_request_remote_id_nullable(self):
- """Test that `remote_ids` could be explicitly set to None"""
- request_to_validate = {'remote_ids': None}
- self.create_idp_validator.validate(request_to_validate)
- self.update_idp_validator.validate(request_to_validate)
-
-
-class FederationProtocolValidationTestCase(unit.BaseTestCase):
- """Test for V3 Federation Protocol API validation."""
-
- def setUp(self):
- super(FederationProtocolValidationTestCase, self).setUp()
-
- schema = federation_schema.federation_protocol_schema
- # create protocol and update protocol have the same shema definition,
- # combine them together, no need to validate separately.
- self.protocol_validator = validators.SchemaValidator(schema)
-
- def test_validate_protocol_request_succeeds(self):
- """Test that we validate a protocol request successfully."""
- request_to_validate = {'mapping_id': uuid.uuid4().hex}
- self.protocol_validator.validate(request_to_validate)
-
- def test_validate_protocol_request_succeeds_with_nonuuid_mapping_id(self):
- """Test that we allow underscore in mapping_id value."""
- request_to_validate = {'mapping_id': 'my_mapping_id'}
- self.protocol_validator.validate(request_to_validate)
-
- def test_validate_protocol_request_fails_with_invalid_params(self):
- """Exception raised when unknown parameter is found."""
- request_to_validate = {'bogus': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.protocol_validator.validate,
- request_to_validate)
-
- def test_validate_protocol_request_no_parameters(self):
- """Test that schema validation with empty request body."""
- request_to_validate = {}
- # 'mapping_id' is required.
- self.assertRaises(exception.SchemaValidationError,
- self.protocol_validator.validate,
- request_to_validate)
-
- def test_validate_protocol_request_fails_with_invalid_mapping_id(self):
- """Exception raised when mapping_id is not string."""
- request_to_validate = {'mapping_id': 12334}
- self.assertRaises(exception.SchemaValidationError,
- self.protocol_validator.validate,
- request_to_validate)
-
-
-class OAuth1ValidationTestCase(unit.BaseTestCase):
- """Test for V3 Identity OAuth1 API validation."""
-
- def setUp(self):
- super(OAuth1ValidationTestCase, self).setUp()
-
- create = oauth1_schema.consumer_create
- update = oauth1_schema.consumer_update
- self.create_consumer_validator = validators.SchemaValidator(create)
- self.update_consumer_validator = validators.SchemaValidator(update)
-
- def test_validate_consumer_request_succeeds(self):
- """Test that we validate a consumer request successfully."""
- request_to_validate = {'description': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.create_consumer_validator.validate(request_to_validate)
- self.update_consumer_validator.validate(request_to_validate)
-
- def test_validate_consumer_request_with_no_parameters(self):
- """Test that schema validation with empty request body."""
- request_to_validate = {}
- self.create_consumer_validator.validate(request_to_validate)
- # At least one property should be given.
- self.assertRaises(exception.SchemaValidationError,
- self.update_consumer_validator.validate,
- request_to_validate)
-
- def test_validate_consumer_request_with_invalid_description_fails(self):
- """Exception is raised when `description` as a non-string value."""
- for invalid_desc in _INVALID_DESC_FORMATS:
- request_to_validate = {'description': invalid_desc}
- self.assertRaises(exception.SchemaValidationError,
- self.create_consumer_validator.validate,
- request_to_validate)
-
- self.assertRaises(exception.SchemaValidationError,
- self.update_consumer_validator.validate,
- request_to_validate)
-
- def test_validate_update_consumer_request_fails_with_secret(self):
- """Exception raised when secret is given."""
- request_to_validate = {'secret': uuid.uuid4().hex}
- self.assertRaises(exception.SchemaValidationError,
- self.update_consumer_validator.validate,
- request_to_validate)
-
- def test_validate_consumer_request_with_none_desc(self):
- """Test that schema validation with None desc."""
- request_to_validate = {'description': None}
- self.create_consumer_validator.validate(request_to_validate)
- self.update_consumer_validator.validate(request_to_validate)
diff --git a/keystone-moon/keystone/tests/unit/test_versions.py b/keystone-moon/keystone/tests/unit/test_versions.py
deleted file mode 100644
index 2f5c2b17..00000000
--- a/keystone-moon/keystone/tests/unit/test_versions.py
+++ /dev/null
@@ -1,1065 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-import functools
-import random
-
-import mock
-from oslo_config import cfg
-from oslo_serialization import jsonutils
-from six.moves import http_client
-from testtools import matchers as tt_matchers
-import webob
-
-from keystone.common import json_home
-from keystone.tests import unit
-from keystone.tests.unit import utils
-from keystone.version import controllers
-
-
-CONF = cfg.CONF
-
-v2_MEDIA_TYPES = [
- {
- "base": "application/json",
- "type": "application/"
- "vnd.openstack.identity-v2.0+json"
- }
-]
-
-v2_HTML_DESCRIPTION = {
- "rel": "describedby",
- "type": "text/html",
- "href": "http://docs.openstack.org/"
-}
-
-
-v2_EXPECTED_RESPONSE = {
- "id": "v2.0",
- "status": "stable",
- "updated": "2014-04-17T00:00:00Z",
- "links": [
- {
- "rel": "self",
- "href": "", # Will get filled in after initialization
- },
- v2_HTML_DESCRIPTION
- ],
- "media-types": v2_MEDIA_TYPES
-}
-
-v2_VERSION_RESPONSE = {
- "version": v2_EXPECTED_RESPONSE
-}
-
-v3_MEDIA_TYPES = [
- {
- "base": "application/json",
- "type": "application/"
- "vnd.openstack.identity-v3+json"
- }
-]
-
-v3_EXPECTED_RESPONSE = {
- "id": "v3.6",
- "status": "stable",
- "updated": "2016-04-04T00:00:00Z",
- "links": [
- {
- "rel": "self",
- "href": "", # Will get filled in after initialization
- }
- ],
- "media-types": v3_MEDIA_TYPES
-}
-
-v3_VERSION_RESPONSE = {
- "version": v3_EXPECTED_RESPONSE
-}
-
-VERSIONS_RESPONSE = {
- "versions": {
- "values": [
- v3_EXPECTED_RESPONSE,
- v2_EXPECTED_RESPONSE
- ]
- }
-}
-
-_build_ec2tokens_relation = functools.partial(
- json_home.build_v3_extension_resource_relation, extension_name='OS-EC2',
- extension_version='1.0')
-
-REVOCATIONS_RELATION = json_home.build_v3_extension_resource_relation(
- 'OS-PKI', '1.0', 'revocations')
-
-_build_simple_cert_relation = functools.partial(
- json_home.build_v3_extension_resource_relation,
- extension_name='OS-SIMPLE-CERT', extension_version='1.0')
-
-_build_trust_relation = functools.partial(
- json_home.build_v3_extension_resource_relation, extension_name='OS-TRUST',
- extension_version='1.0')
-
-_build_federation_rel = functools.partial(
- json_home.build_v3_extension_resource_relation,
- extension_name='OS-FEDERATION',
- extension_version='1.0')
-
-_build_oauth1_rel = functools.partial(
- json_home.build_v3_extension_resource_relation,
- extension_name='OS-OAUTH1', extension_version='1.0')
-
-_build_ep_policy_rel = functools.partial(
- json_home.build_v3_extension_resource_relation,
- extension_name='OS-ENDPOINT-POLICY', extension_version='1.0')
-
-_build_ep_filter_rel = functools.partial(
- json_home.build_v3_extension_resource_relation,
- extension_name='OS-EP-FILTER', extension_version='1.0')
-
-_build_os_inherit_rel = functools.partial(
- json_home.build_v3_extension_resource_relation,
- extension_name='OS-INHERIT', extension_version='1.0')
-
-TRUST_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation(
- 'OS-TRUST', '1.0', 'trust_id')
-
-IDP_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation(
- 'OS-FEDERATION', '1.0', 'idp_id')
-
-PROTOCOL_ID_PARAM_RELATION = json_home.build_v3_extension_parameter_relation(
- 'OS-FEDERATION', '1.0', 'protocol_id')
-
-MAPPING_ID_PARAM_RELATION = json_home.build_v3_extension_parameter_relation(
- 'OS-FEDERATION', '1.0', 'mapping_id')
-
-SP_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation(
- 'OS-FEDERATION', '1.0', 'sp_id')
-
-CONSUMER_ID_PARAMETER_RELATION = (
- json_home.build_v3_extension_parameter_relation(
- 'OS-OAUTH1', '1.0', 'consumer_id'))
-
-REQUEST_TOKEN_ID_PARAMETER_RELATION = (
- json_home.build_v3_extension_parameter_relation(
- 'OS-OAUTH1', '1.0', 'request_token_id'))
-
-ACCESS_TOKEN_ID_PARAMETER_RELATION = (
- json_home.build_v3_extension_parameter_relation(
- 'OS-OAUTH1', '1.0', 'access_token_id'))
-
-ENDPOINT_GROUP_ID_PARAMETER_RELATION = (
- json_home.build_v3_extension_parameter_relation(
- 'OS-EP-FILTER', '1.0', 'endpoint_group_id'))
-
-BASE_IDP_PROTOCOL = '/OS-FEDERATION/identity_providers/{idp_id}/protocols'
-BASE_EP_POLICY = '/policies/{policy_id}/OS-ENDPOINT-POLICY'
-BASE_EP_FILTER_PREFIX = '/OS-EP-FILTER'
-BASE_EP_FILTER = BASE_EP_FILTER_PREFIX + '/endpoint_groups/{endpoint_group_id}'
-BASE_ACCESS_TOKEN = (
- '/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}')
-
-FEDERATED_AUTH_URL = ('/OS-FEDERATION/identity_providers/{idp_id}'
- '/protocols/{protocol_id}/auth')
-FEDERATED_IDP_SPECIFIC_WEBSSO = ('/auth/OS-FEDERATION/identity_providers/'
- '{idp_id}/protocols/{protocol_id}/websso')
-
-V3_JSON_HOME_RESOURCES = {
- json_home.build_v3_resource_relation('auth_tokens'): {
- 'href': '/auth/tokens'},
- json_home.build_v3_resource_relation('auth_catalog'): {
- 'href': '/auth/catalog'},
- json_home.build_v3_resource_relation('auth_projects'): {
- 'href': '/auth/projects'},
- json_home.build_v3_resource_relation('auth_domains'): {
- 'href': '/auth/domains'},
- json_home.build_v3_resource_relation('credential'): {
- 'href-template': '/credentials/{credential_id}',
- 'href-vars': {
- 'credential_id':
- json_home.build_v3_parameter_relation('credential_id')}},
- json_home.build_v3_resource_relation('credentials'): {
- 'href': '/credentials'},
- json_home.build_v3_resource_relation('domain'): {
- 'href-template': '/domains/{domain_id}',
- 'href-vars': {'domain_id': json_home.Parameters.DOMAIN_ID, }},
- json_home.build_v3_resource_relation('domain_group_role'): {
- 'href-template':
- '/domains/{domain_id}/groups/{group_id}/roles/{role_id}',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'group_id': json_home.Parameters.GROUP_ID,
- 'role_id': json_home.Parameters.ROLE_ID, }},
- json_home.build_v3_resource_relation('domain_group_roles'): {
- 'href-template': '/domains/{domain_id}/groups/{group_id}/roles',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'group_id': json_home.Parameters.GROUP_ID}},
- json_home.build_v3_resource_relation('domain_user_role'): {
- 'href-template':
- '/domains/{domain_id}/users/{user_id}/roles/{role_id}',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'role_id': json_home.Parameters.ROLE_ID,
- 'user_id': json_home.Parameters.USER_ID, }},
- json_home.build_v3_resource_relation('domain_user_roles'): {
- 'href-template': '/domains/{domain_id}/users/{user_id}/roles',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'user_id': json_home.Parameters.USER_ID, }},
- json_home.build_v3_resource_relation('domains'): {'href': '/domains'},
- json_home.build_v3_resource_relation('endpoint'): {
- 'href-template': '/endpoints/{endpoint_id}',
- 'href-vars': {
- 'endpoint_id':
- json_home.build_v3_parameter_relation('endpoint_id'), }},
- json_home.build_v3_resource_relation('endpoints'): {
- 'href': '/endpoints'},
- _build_ec2tokens_relation(resource_name='ec2tokens'): {
- 'href': '/ec2tokens'},
- _build_ec2tokens_relation(resource_name='user_credential'): {
- 'href-template': '/users/{user_id}/credentials/OS-EC2/{credential_id}',
- 'href-vars': {
- 'credential_id':
- json_home.build_v3_parameter_relation('credential_id'),
- 'user_id': json_home.Parameters.USER_ID, }},
- _build_ec2tokens_relation(resource_name='user_credentials'): {
- 'href-template': '/users/{user_id}/credentials/OS-EC2',
- 'href-vars': {
- 'user_id': json_home.Parameters.USER_ID, }},
- REVOCATIONS_RELATION: {
- 'href': '/auth/tokens/OS-PKI/revoked'},
- 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-REVOKE/1.0/rel/'
- 'events': {
- 'href': '/OS-REVOKE/events'},
- _build_simple_cert_relation(resource_name='ca_certificate'): {
- 'href': '/OS-SIMPLE-CERT/ca'},
- _build_simple_cert_relation(resource_name='certificates'): {
- 'href': '/OS-SIMPLE-CERT/certificates'},
- _build_trust_relation(resource_name='trust'):
- {
- 'href-template': '/OS-TRUST/trusts/{trust_id}',
- 'href-vars': {'trust_id': TRUST_ID_PARAMETER_RELATION, }},
- _build_trust_relation(resource_name='trust_role'): {
- 'href-template': '/OS-TRUST/trusts/{trust_id}/roles/{role_id}',
- 'href-vars': {
- 'role_id': json_home.Parameters.ROLE_ID,
- 'trust_id': TRUST_ID_PARAMETER_RELATION, }},
- _build_trust_relation(resource_name='trust_roles'): {
- 'href-template': '/OS-TRUST/trusts/{trust_id}/roles',
- 'href-vars': {'trust_id': TRUST_ID_PARAMETER_RELATION, }},
- _build_trust_relation(resource_name='trusts'): {
- 'href': '/OS-TRUST/trusts'},
- 'http://docs.openstack.org/api/openstack-identity/3/ext/s3tokens/1.0/rel/'
- 's3tokens': {
- 'href': '/s3tokens'},
- json_home.build_v3_resource_relation('group'): {
- 'href-template': '/groups/{group_id}',
- 'href-vars': {
- 'group_id': json_home.Parameters.GROUP_ID, }},
- json_home.build_v3_resource_relation('group_user'): {
- 'href-template': '/groups/{group_id}/users/{user_id}',
- 'href-vars': {
- 'group_id': json_home.Parameters.GROUP_ID,
- 'user_id': json_home.Parameters.USER_ID, }},
- json_home.build_v3_resource_relation('group_users'): {
- 'href-template': '/groups/{group_id}/users',
- 'href-vars': {'group_id': json_home.Parameters.GROUP_ID, }},
- json_home.build_v3_resource_relation('groups'): {'href': '/groups'},
- json_home.build_v3_resource_relation('policies'): {
- 'href': '/policies'},
- json_home.build_v3_resource_relation('policy'): {
- 'href-template': '/policies/{policy_id}',
- 'href-vars': {
- 'policy_id':
- json_home.build_v3_parameter_relation('policy_id'), }},
- json_home.build_v3_resource_relation('project'): {
- 'href-template': '/projects/{project_id}',
- 'href-vars': {
- 'project_id': json_home.Parameters.PROJECT_ID, }},
- json_home.build_v3_resource_relation('project_group_role'): {
- 'href-template':
- '/projects/{project_id}/groups/{group_id}/roles/{role_id}',
- 'href-vars': {
- 'group_id': json_home.Parameters.GROUP_ID,
- 'project_id': json_home.Parameters.PROJECT_ID,
- 'role_id': json_home.Parameters.ROLE_ID, }},
- json_home.build_v3_resource_relation('project_group_roles'): {
- 'href-template': '/projects/{project_id}/groups/{group_id}/roles',
- 'href-vars': {
- 'group_id': json_home.Parameters.GROUP_ID,
- 'project_id': json_home.Parameters.PROJECT_ID, }},
- json_home.build_v3_resource_relation('project_user_role'): {
- 'href-template':
- '/projects/{project_id}/users/{user_id}/roles/{role_id}',
- 'href-vars': {
- 'project_id': json_home.Parameters.PROJECT_ID,
- 'role_id': json_home.Parameters.ROLE_ID,
- 'user_id': json_home.Parameters.USER_ID, }},
- json_home.build_v3_resource_relation('project_user_roles'): {
- 'href-template': '/projects/{project_id}/users/{user_id}/roles',
- 'href-vars': {
- 'project_id': json_home.Parameters.PROJECT_ID,
- 'user_id': json_home.Parameters.USER_ID, }},
- json_home.build_v3_resource_relation('projects'): {
- 'href': '/projects'},
- json_home.build_v3_resource_relation('region'): {
- 'href-template': '/regions/{region_id}',
- 'href-vars': {
- 'region_id':
- json_home.build_v3_parameter_relation('region_id'), }},
- json_home.build_v3_resource_relation('regions'): {'href': '/regions'},
- json_home.build_v3_resource_relation('role'): {
- 'href-template': '/roles/{role_id}',
- 'href-vars': {
- 'role_id': json_home.Parameters.ROLE_ID, }},
- json_home.build_v3_resource_relation('implied_roles'): {
- 'href-template': '/roles/{prior_role_id}/implies',
- 'href-vars': {
- 'prior_role_id': json_home.Parameters.ROLE_ID},
- 'hints': {'status': 'experimental'}},
- json_home.build_v3_resource_relation('implied_role'): {
- 'href-template':
- '/roles/{prior_role_id}/implies/{implied_role_id}',
- 'href-vars': {
- 'prior_role_id': json_home.Parameters.ROLE_ID,
- 'implied_role_id': json_home.Parameters.ROLE_ID,
- },
- 'hints': {'status': 'experimental'}},
- json_home.build_v3_resource_relation('role_inferences'): {
- 'href': '/role_inferences',
- 'hints': {'status': 'experimental'}},
- json_home.build_v3_resource_relation('role_assignments'): {
- 'href': '/role_assignments'},
- json_home.build_v3_resource_relation('roles'): {'href': '/roles'},
- json_home.build_v3_resource_relation('service'): {
- 'href-template': '/services/{service_id}',
- 'href-vars': {
- 'service_id':
- json_home.build_v3_parameter_relation('service_id')}},
- json_home.build_v3_resource_relation('services'): {
- 'href': '/services'},
- json_home.build_v3_resource_relation('user'): {
- 'href-template': '/users/{user_id}',
- 'href-vars': {
- 'user_id': json_home.Parameters.USER_ID, }},
- json_home.build_v3_resource_relation('user_change_password'): {
- 'href-template': '/users/{user_id}/password',
- 'href-vars': {'user_id': json_home.Parameters.USER_ID, }},
- json_home.build_v3_resource_relation('user_groups'): {
- 'href-template': '/users/{user_id}/groups',
- 'href-vars': {'user_id': json_home.Parameters.USER_ID, }},
- json_home.build_v3_resource_relation('user_projects'): {
- 'href-template': '/users/{user_id}/projects',
- 'href-vars': {'user_id': json_home.Parameters.USER_ID, }},
- json_home.build_v3_resource_relation('users'): {'href': '/users'},
- _build_federation_rel(resource_name='domains'): {
- 'href': '/auth/domains'},
- _build_federation_rel(resource_name='websso'): {
- 'href-template': '/auth/OS-FEDERATION/websso/{protocol_id}',
- 'href-vars': {
- 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }},
- _build_federation_rel(resource_name='projects'): {
- 'href': '/auth/projects'},
- _build_federation_rel(resource_name='saml2'): {
- 'href': '/auth/OS-FEDERATION/saml2'},
- _build_federation_rel(resource_name='ecp'): {
- 'href': '/auth/OS-FEDERATION/saml2/ecp'},
- _build_federation_rel(resource_name='metadata'): {
- 'href': '/OS-FEDERATION/saml2/metadata'},
- _build_federation_rel(resource_name='identity_providers'): {
- 'href': '/OS-FEDERATION/identity_providers'},
- _build_federation_rel(resource_name='service_providers'): {
- 'href': '/OS-FEDERATION/service_providers'},
- _build_federation_rel(resource_name='mappings'): {
- 'href': '/OS-FEDERATION/mappings'},
- _build_federation_rel(resource_name='identity_provider'):
- {
- 'href-template': '/OS-FEDERATION/identity_providers/{idp_id}',
- 'href-vars': {'idp_id': IDP_ID_PARAMETER_RELATION, }},
- _build_federation_rel(resource_name='identity_providers'): {
- 'href-template': FEDERATED_IDP_SPECIFIC_WEBSSO,
- 'href-vars': {
- 'idp_id': IDP_ID_PARAMETER_RELATION,
- 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }},
- _build_federation_rel(resource_name='service_provider'):
- {
- 'href-template': '/OS-FEDERATION/service_providers/{sp_id}',
- 'href-vars': {'sp_id': SP_ID_PARAMETER_RELATION, }},
- _build_federation_rel(resource_name='mapping'):
- {
- 'href-template': '/OS-FEDERATION/mappings/{mapping_id}',
- 'href-vars': {'mapping_id': MAPPING_ID_PARAM_RELATION, }},
- _build_federation_rel(resource_name='identity_provider_protocol'): {
- 'href-template': BASE_IDP_PROTOCOL + '/{protocol_id}',
- 'href-vars': {
- 'idp_id': IDP_ID_PARAMETER_RELATION,
- 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }},
- _build_federation_rel(resource_name='identity_provider_protocols'): {
- 'href-template': BASE_IDP_PROTOCOL,
- 'href-vars': {
- 'idp_id': IDP_ID_PARAMETER_RELATION}},
- _build_federation_rel(resource_name='identity_provider_protocol_auth'): {
- 'href-template': FEDERATED_AUTH_URL,
- 'href-vars': {
- 'idp_id': IDP_ID_PARAMETER_RELATION,
- 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }},
- _build_oauth1_rel(resource_name='access_tokens'): {
- 'href': '/OS-OAUTH1/access_token'},
- _build_oauth1_rel(resource_name='request_tokens'): {
- 'href': '/OS-OAUTH1/request_token'},
- _build_oauth1_rel(resource_name='consumers'): {
- 'href': '/OS-OAUTH1/consumers'},
- _build_oauth1_rel(resource_name='authorize_request_token'):
- {
- 'href-template': '/OS-OAUTH1/authorize/{request_token_id}',
- 'href-vars': {'request_token_id':
- REQUEST_TOKEN_ID_PARAMETER_RELATION, }},
- _build_oauth1_rel(resource_name='consumer'):
- {
- 'href-template': '/OS-OAUTH1/consumers/{consumer_id}',
- 'href-vars': {'consumer_id': CONSUMER_ID_PARAMETER_RELATION, }},
- _build_oauth1_rel(resource_name='user_access_token'):
- {
- 'href-template': BASE_ACCESS_TOKEN,
- 'href-vars': {'user_id': json_home.Parameters.USER_ID,
- 'access_token_id':
- ACCESS_TOKEN_ID_PARAMETER_RELATION, }},
- _build_oauth1_rel(resource_name='user_access_tokens'):
- {
- 'href-template': '/users/{user_id}/OS-OAUTH1/access_tokens',
- 'href-vars': {'user_id': json_home.Parameters.USER_ID, }},
- _build_oauth1_rel(resource_name='user_access_token_role'):
- {
- 'href-template': BASE_ACCESS_TOKEN + '/roles/{role_id}',
- 'href-vars': {'user_id': json_home.Parameters.USER_ID,
- 'role_id': json_home.Parameters.ROLE_ID,
- 'access_token_id':
- ACCESS_TOKEN_ID_PARAMETER_RELATION, }},
- _build_oauth1_rel(resource_name='user_access_token_roles'):
- {
- 'href-template': BASE_ACCESS_TOKEN + '/roles',
- 'href-vars': {'user_id': json_home.Parameters.USER_ID,
- 'access_token_id':
- ACCESS_TOKEN_ID_PARAMETER_RELATION, }},
- _build_ep_policy_rel(resource_name='endpoint_policy'):
- {
- 'href-template': '/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/policy',
- 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID, }},
- _build_ep_policy_rel(resource_name='endpoint_policy_association'):
- {
- 'href-template': BASE_EP_POLICY + '/endpoints/{endpoint_id}',
- 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID,
- 'policy_id': json_home.Parameters.POLICY_ID, }},
- _build_ep_policy_rel(resource_name='policy_endpoints'):
- {
- 'href-template': BASE_EP_POLICY + '/endpoints',
- 'href-vars': {'policy_id': json_home.Parameters.POLICY_ID, }},
- _build_ep_policy_rel(
- resource_name='region_and_service_policy_association'):
- {
- 'href-template': (BASE_EP_POLICY +
- '/services/{service_id}/regions/{region_id}'),
- 'href-vars': {'policy_id': json_home.Parameters.POLICY_ID,
- 'service_id': json_home.Parameters.SERVICE_ID,
- 'region_id': json_home.Parameters.REGION_ID, }},
- _build_ep_policy_rel(resource_name='service_policy_association'):
- {
- 'href-template': BASE_EP_POLICY + '/services/{service_id}',
- 'href-vars': {'policy_id': json_home.Parameters.POLICY_ID,
- 'service_id': json_home.Parameters.SERVICE_ID, }},
- _build_ep_filter_rel(resource_name='endpoint_group'):
- {
- 'href-template': '/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}',
- 'href-vars': {'endpoint_group_id':
- ENDPOINT_GROUP_ID_PARAMETER_RELATION, }},
- _build_ep_filter_rel(
- resource_name='endpoint_group_to_project_association'):
- {
- 'href-template': BASE_EP_FILTER + '/projects/{project_id}',
- 'href-vars': {'endpoint_group_id':
- ENDPOINT_GROUP_ID_PARAMETER_RELATION,
- 'project_id': json_home.Parameters.PROJECT_ID, }},
- _build_ep_filter_rel(resource_name='endpoint_groups'):
- {'href': '/OS-EP-FILTER/endpoint_groups'},
- _build_ep_filter_rel(resource_name='endpoint_projects'):
- {
- 'href-template': '/OS-EP-FILTER/endpoints/{endpoint_id}/projects',
- 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID, }},
- _build_ep_filter_rel(resource_name='endpoints_in_endpoint_group'):
- {
- 'href-template': BASE_EP_FILTER + '/endpoints',
- 'href-vars': {'endpoint_group_id':
- ENDPOINT_GROUP_ID_PARAMETER_RELATION, }},
- _build_ep_filter_rel(resource_name='project_endpoint_groups'):
- {
- 'href-template': (BASE_EP_FILTER_PREFIX + '/projects/{project_id}' +
- '/endpoint_groups'),
- 'href-vars': {'project_id':
- json_home.Parameters.PROJECT_ID, }},
- _build_ep_filter_rel(resource_name='project_endpoint'):
- {
- 'href-template': ('/OS-EP-FILTER/projects/{project_id}'
- '/endpoints/{endpoint_id}'),
- 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID,
- 'project_id': json_home.Parameters.PROJECT_ID, }},
- _build_ep_filter_rel(resource_name='project_endpoints'):
- {
- 'href-template': '/OS-EP-FILTER/projects/{project_id}/endpoints',
- 'href-vars': {'project_id': json_home.Parameters.PROJECT_ID, }},
- _build_ep_filter_rel(
- resource_name='projects_associated_with_endpoint_group'):
- {
- 'href-template': BASE_EP_FILTER + '/projects',
- 'href-vars': {'endpoint_group_id':
- ENDPOINT_GROUP_ID_PARAMETER_RELATION, }},
- _build_os_inherit_rel(
- resource_name='domain_user_role_inherited_to_projects'):
- {
- 'href-template': '/OS-INHERIT/domains/{domain_id}/users/'
- '{user_id}/roles/{role_id}/inherited_to_projects',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'role_id': json_home.Parameters.ROLE_ID,
- 'user_id': json_home.Parameters.USER_ID, }},
- _build_os_inherit_rel(
- resource_name='domain_group_role_inherited_to_projects'):
- {
- 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/'
- '{group_id}/roles/{role_id}/inherited_to_projects',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'group_id': json_home.Parameters.GROUP_ID,
- 'role_id': json_home.Parameters.ROLE_ID, }},
- _build_os_inherit_rel(
- resource_name='domain_user_roles_inherited_to_projects'):
- {
- 'href-template': '/OS-INHERIT/domains/{domain_id}/users/'
- '{user_id}/roles/inherited_to_projects',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'user_id': json_home.Parameters.USER_ID, }},
- _build_os_inherit_rel(
- resource_name='domain_group_roles_inherited_to_projects'):
- {
- 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/'
- '{group_id}/roles/inherited_to_projects',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'group_id': json_home.Parameters.GROUP_ID, }},
- _build_os_inherit_rel(
- resource_name='project_user_role_inherited_to_projects'):
- {
- 'href-template': '/OS-INHERIT/projects/{project_id}/users/'
- '{user_id}/roles/{role_id}/inherited_to_projects',
- 'href-vars': {
- 'project_id': json_home.Parameters.PROJECT_ID,
- 'role_id': json_home.Parameters.ROLE_ID,
- 'user_id': json_home.Parameters.USER_ID, }},
- _build_os_inherit_rel(
- resource_name='project_group_role_inherited_to_projects'):
- {
- 'href-template': '/OS-INHERIT/projects/{project_id}/groups/'
- '{group_id}/roles/{role_id}/inherited_to_projects',
- 'href-vars': {
- 'project_id': json_home.Parameters.PROJECT_ID,
- 'group_id': json_home.Parameters.GROUP_ID,
- 'role_id': json_home.Parameters.ROLE_ID, }},
- json_home.build_v3_resource_relation('domain_config'): {
- 'href-template':
- '/domains/{domain_id}/config',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID},
- 'hints': {'status': 'experimental'}},
- json_home.build_v3_resource_relation('domain_config_group'): {
- 'href-template':
- '/domains/{domain_id}/config/{group}',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'group': json_home.build_v3_parameter_relation('config_group')},
- 'hints': {'status': 'experimental'}},
- json_home.build_v3_resource_relation('domain_config_option'): {
- 'href-template':
- '/domains/{domain_id}/config/{group}/{option}',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'group': json_home.build_v3_parameter_relation('config_group'),
- 'option': json_home.build_v3_parameter_relation('config_option')},
- 'hints': {'status': 'experimental'}},
- json_home.build_v3_resource_relation('domain_config_default'): {
- 'href': '/domains/config/default',
- 'hints': {'status': 'experimental'}},
- json_home.build_v3_resource_relation('domain_config_default_group'): {
- 'href-template': '/domains/config/{group}/default',
- 'href-vars': {
- 'group': json_home.build_v3_parameter_relation('config_group')},
- 'hints': {'status': 'experimental'}},
- json_home.build_v3_resource_relation('domain_config_default_option'): {
- 'href-template': '/domains/config/{group}/{option}/default',
- 'href-vars': {
- 'group': json_home.build_v3_parameter_relation('config_group'),
- 'option': json_home.build_v3_parameter_relation('config_option')},
- 'hints': {'status': 'experimental'}},
-}
-
-
-class TestClient(object):
- def __init__(self, app=None, token=None):
- self.app = app
- self.token = token
-
- def request(self, method, path, headers=None, body=None):
- if headers is None:
- headers = {}
-
- if self.token:
- headers.setdefault('X-Auth-Token', self.token)
-
- req = webob.Request.blank(path)
- req.method = method
- for k, v in headers.items():
- req.headers[k] = v
- if body:
- req.body = body
- return req.get_response(self.app)
-
- def get(self, path, headers=None):
- return self.request('GET', path=path, headers=headers)
-
- def post(self, path, headers=None, body=None):
- return self.request('POST', path=path, headers=headers, body=body)
-
- def put(self, path, headers=None, body=None):
- return self.request('PUT', path=path, headers=headers, body=body)
-
-
-class _VersionsEqual(tt_matchers.MatchesListwise):
- def __init__(self, expected):
- super(_VersionsEqual, self).__init__([
- tt_matchers.KeysEqual(expected),
- tt_matchers.KeysEqual(expected['versions']),
- tt_matchers.HasLength(len(expected['versions']['values'])),
- tt_matchers.ContainsAll(expected['versions']['values']),
- ])
-
- def match(self, other):
- return super(_VersionsEqual, self).match([
- other,
- other['versions'],
- other['versions']['values'],
- other['versions']['values'],
- ])
-
-
-class VersionTestCase(unit.TestCase):
- def setUp(self):
- super(VersionTestCase, self).setUp()
- self.load_backends()
- self.public_app = self.loadapp('keystone', 'main')
- self.admin_app = self.loadapp('keystone', 'admin')
-
- self.config_fixture.config(
- public_endpoint='http://localhost:%(public_port)d',
- admin_endpoint='http://localhost:%(admin_port)d')
-
- def config_overrides(self):
- super(VersionTestCase, self).config_overrides()
- admin_port = random.randint(10000, 30000)
- public_port = random.randint(40000, 60000)
- self.config_fixture.config(group='eventlet_server',
- public_port=public_port,
- admin_port=admin_port)
-
- def _paste_in_port(self, response, port):
- for link in response['links']:
- if link['rel'] == 'self':
- link['href'] = port
-
- def test_public_versions(self):
- client = TestClient(self.public_app)
- resp = client.get('/')
- self.assertEqual(300, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = VERSIONS_RESPONSE
- for version in expected['versions']['values']:
- if version['id'].startswith('v3'):
- self._paste_in_port(
- version, 'http://localhost:%s/v3/' %
- CONF.eventlet_server.public_port)
- elif version['id'] == 'v2.0':
- self._paste_in_port(
- version, 'http://localhost:%s/v2.0/' %
- CONF.eventlet_server.public_port)
- self.assertThat(data, _VersionsEqual(expected))
-
- def test_admin_versions(self):
- client = TestClient(self.admin_app)
- resp = client.get('/')
- self.assertEqual(300, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = VERSIONS_RESPONSE
- for version in expected['versions']['values']:
- if version['id'].startswith('v3'):
- self._paste_in_port(
- version, 'http://localhost:%s/v3/' %
- CONF.eventlet_server.admin_port)
- elif version['id'] == 'v2.0':
- self._paste_in_port(
- version, 'http://localhost:%s/v2.0/' %
- CONF.eventlet_server.admin_port)
- self.assertThat(data, _VersionsEqual(expected))
-
- def test_use_site_url_if_endpoint_unset(self):
- self.config_fixture.config(public_endpoint=None, admin_endpoint=None)
-
- for app in (self.public_app, self.admin_app):
- client = TestClient(app)
- resp = client.get('/')
- self.assertEqual(300, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = VERSIONS_RESPONSE
- for version in expected['versions']['values']:
- # localhost happens to be the site url for tests
- if version['id'].startswith('v3'):
- self._paste_in_port(
- version, 'http://localhost/v3/')
- elif version['id'] == 'v2.0':
- self._paste_in_port(
- version, 'http://localhost/v2.0/')
- self.assertThat(data, _VersionsEqual(expected))
-
- def test_public_version_v2(self):
- client = TestClient(self.public_app)
- resp = client.get('/v2.0/')
- self.assertEqual(http_client.OK, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = v2_VERSION_RESPONSE
- self._paste_in_port(expected['version'],
- 'http://localhost:%s/v2.0/' %
- CONF.eventlet_server.public_port)
- self.assertEqual(expected, data)
-
- def test_admin_version_v2(self):
- client = TestClient(self.admin_app)
- resp = client.get('/v2.0/')
- self.assertEqual(http_client.OK, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = v2_VERSION_RESPONSE
- self._paste_in_port(expected['version'],
- 'http://localhost:%s/v2.0/' %
- CONF.eventlet_server.admin_port)
- self.assertEqual(expected, data)
-
- def test_use_site_url_if_endpoint_unset_v2(self):
- self.config_fixture.config(public_endpoint=None, admin_endpoint=None)
- for app in (self.public_app, self.admin_app):
- client = TestClient(app)
- resp = client.get('/v2.0/')
- self.assertEqual(http_client.OK, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = v2_VERSION_RESPONSE
- self._paste_in_port(expected['version'], 'http://localhost/v2.0/')
- self.assertEqual(data, expected)
-
- def test_public_version_v3(self):
- client = TestClient(self.public_app)
- resp = client.get('/v3/')
- self.assertEqual(http_client.OK, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = v3_VERSION_RESPONSE
- self._paste_in_port(expected['version'],
- 'http://localhost:%s/v3/' %
- CONF.eventlet_server.public_port)
- self.assertEqual(expected, data)
-
- @utils.wip('waiting on bug #1381961')
- def test_admin_version_v3(self):
- client = TestClient(self.admin_app)
- resp = client.get('/v3/')
- self.assertEqual(http_client.OK, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = v3_VERSION_RESPONSE
- self._paste_in_port(expected['version'],
- 'http://localhost:%s/v3/' %
- CONF.eventlet_server.admin_port)
- self.assertEqual(expected, data)
-
- def test_use_site_url_if_endpoint_unset_v3(self):
- self.config_fixture.config(public_endpoint=None, admin_endpoint=None)
- for app in (self.public_app, self.admin_app):
- client = TestClient(app)
- resp = client.get('/v3/')
- self.assertEqual(http_client.OK, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = v3_VERSION_RESPONSE
- self._paste_in_port(expected['version'], 'http://localhost/v3/')
- self.assertEqual(expected, data)
-
- @mock.patch.object(controllers, '_VERSIONS', ['v3'])
- def test_v2_disabled(self):
- client = TestClient(self.public_app)
- # request to /v2.0 should fail
- resp = client.get('/v2.0/')
- self.assertEqual(http_client.NOT_FOUND, resp.status_int)
-
- # request to /v3 should pass
- resp = client.get('/v3/')
- self.assertEqual(http_client.OK, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = v3_VERSION_RESPONSE
- self._paste_in_port(expected['version'],
- 'http://localhost:%s/v3/' %
- CONF.eventlet_server.public_port)
- self.assertEqual(expected, data)
-
- # only v3 information should be displayed by requests to /
- v3_only_response = {
- "versions": {
- "values": [
- v3_EXPECTED_RESPONSE
- ]
- }
- }
- self._paste_in_port(v3_only_response['versions']['values'][0],
- 'http://localhost:%s/v3/' %
- CONF.eventlet_server.public_port)
- resp = client.get('/')
- self.assertEqual(300, resp.status_int)
- data = jsonutils.loads(resp.body)
- self.assertEqual(v3_only_response, data)
-
- @mock.patch.object(controllers, '_VERSIONS', ['v2.0'])
- def test_v3_disabled(self):
- client = TestClient(self.public_app)
- # request to /v3 should fail
- resp = client.get('/v3/')
- self.assertEqual(http_client.NOT_FOUND, resp.status_int)
-
- # request to /v2.0 should pass
- resp = client.get('/v2.0/')
- self.assertEqual(http_client.OK, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = v2_VERSION_RESPONSE
- self._paste_in_port(expected['version'],
- 'http://localhost:%s/v2.0/' %
- CONF.eventlet_server.public_port)
- self.assertEqual(expected, data)
-
- # only v2 information should be displayed by requests to /
- v2_only_response = {
- "versions": {
- "values": [
- v2_EXPECTED_RESPONSE
- ]
- }
- }
- self._paste_in_port(v2_only_response['versions']['values'][0],
- 'http://localhost:%s/v2.0/' %
- CONF.eventlet_server.public_port)
- resp = client.get('/')
- self.assertEqual(300, resp.status_int)
- data = jsonutils.loads(resp.body)
- self.assertEqual(v2_only_response, data)
-
- def _test_json_home(self, path, exp_json_home_data):
- client = TestClient(self.public_app)
- resp = client.get(path, headers={'Accept': 'application/json-home'})
-
- self.assertThat(resp.status, tt_matchers.Equals('200 OK'))
- self.assertThat(resp.headers['Content-Type'],
- tt_matchers.Equals('application/json-home'))
-
- self.assertThat(jsonutils.loads(resp.body),
- tt_matchers.Equals(exp_json_home_data))
-
- def test_json_home_v3(self):
- # If the request is /v3 and the Accept header is application/json-home
- # then the server responds with a JSON Home document.
-
- exp_json_home_data = {
- 'resources': V3_JSON_HOME_RESOURCES}
-
- self._test_json_home('/v3', exp_json_home_data)
-
- def test_json_home_root(self):
- # If the request is / and the Accept header is application/json-home
- # then the server responds with a JSON Home document.
-
- exp_json_home_data = copy.deepcopy({
- 'resources': V3_JSON_HOME_RESOURCES})
- json_home.translate_urls(exp_json_home_data, '/v3')
-
- self._test_json_home('/', exp_json_home_data)
-
- def test_accept_type_handling(self):
- # Accept headers with multiple types and qvalues are handled.
-
- def make_request(accept_types=None):
- client = TestClient(self.public_app)
- headers = None
- if accept_types:
- headers = {'Accept': accept_types}
- resp = client.get('/v3', headers=headers)
- self.assertThat(resp.status, tt_matchers.Equals('200 OK'))
- return resp.headers['Content-Type']
-
- JSON = controllers.MimeTypes.JSON
- JSON_HOME = controllers.MimeTypes.JSON_HOME
-
- JSON_MATCHER = tt_matchers.Equals(JSON)
- JSON_HOME_MATCHER = tt_matchers.Equals(JSON_HOME)
-
- # Default is JSON.
- self.assertThat(make_request(), JSON_MATCHER)
-
- # Can request JSON and get JSON.
- self.assertThat(make_request(JSON), JSON_MATCHER)
-
- # Can request JSONHome and get JSONHome.
- self.assertThat(make_request(JSON_HOME), JSON_HOME_MATCHER)
-
- # If request JSON, JSON Home get JSON.
- accept_types = '%s, %s' % (JSON, JSON_HOME)
- self.assertThat(make_request(accept_types), JSON_MATCHER)
-
- # If request JSON Home, JSON get JSON.
- accept_types = '%s, %s' % (JSON_HOME, JSON)
- self.assertThat(make_request(accept_types), JSON_MATCHER)
-
- # If request JSON Home, JSON;q=0.5 get JSON Home.
- accept_types = '%s, %s;q=0.5' % (JSON_HOME, JSON)
- self.assertThat(make_request(accept_types), JSON_HOME_MATCHER)
-
- # If request some unknown mime-type, get JSON.
- self.assertThat(make_request(self.getUniqueString()), JSON_MATCHER)
-
- @mock.patch.object(controllers, '_VERSIONS', [])
- def test_no_json_home_document_returned_when_v3_disabled(self):
- json_home_document = controllers.request_v3_json_home('some_prefix')
- expected_document = {'resources': {}}
- self.assertEqual(expected_document, json_home_document)
-
- def test_extension_property_method_returns_none(self):
- extension_obj = controllers.Extensions()
- extensions_property = extension_obj.extensions
- self.assertIsNone(extensions_property)
-
-
-class VersionSingleAppTestCase(unit.TestCase):
- """Tests running with a single application loaded.
-
- These are important because when Keystone is running in Apache httpd
- there's only one application loaded for each instance.
-
- """
-
- def setUp(self):
- super(VersionSingleAppTestCase, self).setUp()
- self.load_backends()
-
- self.config_fixture.config(
- public_endpoint='http://localhost:%(public_port)d',
- admin_endpoint='http://localhost:%(admin_port)d')
-
- def config_overrides(self):
- super(VersionSingleAppTestCase, self).config_overrides()
- admin_port = random.randint(10000, 30000)
- public_port = random.randint(40000, 60000)
- self.config_fixture.config(group='eventlet_server',
- public_port=public_port,
- admin_port=admin_port)
-
- def _paste_in_port(self, response, port):
- for link in response['links']:
- if link['rel'] == 'self':
- link['href'] = port
-
- def _test_version(self, app_name):
- def app_port():
- if app_name == 'admin':
- return CONF.eventlet_server.admin_port
- else:
- return CONF.eventlet_server.public_port
- app = self.loadapp('keystone', app_name)
- client = TestClient(app)
- resp = client.get('/')
- self.assertEqual(300, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = VERSIONS_RESPONSE
- for version in expected['versions']['values']:
- if version['id'].startswith('v3'):
- self._paste_in_port(
- version, 'http://localhost:%s/v3/' % app_port())
- elif version['id'] == 'v2.0':
- self._paste_in_port(
- version, 'http://localhost:%s/v2.0/' % app_port())
- self.assertThat(data, _VersionsEqual(expected))
-
- def test_public(self):
- self._test_version('main')
-
- def test_admin(self):
- self._test_version('admin')
-
-
-class VersionBehindSslTestCase(unit.TestCase):
- def setUp(self):
- super(VersionBehindSslTestCase, self).setUp()
- self.load_backends()
- self.public_app = self.loadapp('keystone', 'main')
-
- def config_overrides(self):
- super(VersionBehindSslTestCase, self).config_overrides()
- self.config_fixture.config(
- secure_proxy_ssl_header='HTTP_X_FORWARDED_PROTO')
-
- def _paste_in_port(self, response, port):
- for link in response['links']:
- if link['rel'] == 'self':
- link['href'] = port
-
- def _get_expected(self, host):
- expected = VERSIONS_RESPONSE
- for version in expected['versions']['values']:
- if version['id'].startswith('v3'):
- self._paste_in_port(version, host + 'v3/')
- elif version['id'] == 'v2.0':
- self._paste_in_port(version, host + 'v2.0/')
- return expected
-
- def test_versions_without_headers(self):
- client = TestClient(self.public_app)
- host_name = 'host-%d' % random.randint(10, 30)
- host_port = random.randint(10000, 30000)
- host = 'http://%s:%s/' % (host_name, host_port)
- resp = client.get(host)
- self.assertEqual(300, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = self._get_expected(host)
- self.assertThat(data, _VersionsEqual(expected))
-
- def test_versions_with_header(self):
- client = TestClient(self.public_app)
- host_name = 'host-%d' % random.randint(10, 30)
- host_port = random.randint(10000, 30000)
- resp = client.get('http://%s:%s/' % (host_name, host_port),
- headers={'X-Forwarded-Proto': 'https'})
- self.assertEqual(300, resp.status_int)
- data = jsonutils.loads(resp.body)
- expected = self._get_expected('https://%s:%s/' % (host_name,
- host_port))
- self.assertThat(data, _VersionsEqual(expected))
diff --git a/keystone-moon/keystone/tests/unit/test_wsgi.py b/keystone-moon/keystone/tests/unit/test_wsgi.py
deleted file mode 100644
index 564d7406..00000000
--- a/keystone-moon/keystone/tests/unit/test_wsgi.py
+++ /dev/null
@@ -1,586 +0,0 @@
-# encoding: utf-8
-#
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import gettext
-import socket
-import uuid
-
-import eventlet
-import mock
-import oslo_i18n
-from oslo_serialization import jsonutils
-import six
-from six.moves import http_client
-from testtools import matchers
-import webob
-
-from keystone.common import environment
-from keystone.common import wsgi
-from keystone import exception
-from keystone.tests import unit
-
-
-class FakeApp(wsgi.Application):
- def index(self, context):
- return {'a': 'b'}
-
-
-class FakeAttributeCheckerApp(wsgi.Application):
- def index(self, context):
- return context['query_string']
-
- def assert_attribute(self, body, attr):
- """Asserts that the given request has a certain attribute."""
- ref = jsonutils.loads(body)
- self._require_attribute(ref, attr)
-
- def assert_attributes(self, body, attr):
- """Asserts that the given request has a certain set attributes."""
- ref = jsonutils.loads(body)
- self._require_attributes(ref, attr)
-
-
-class RouterTest(unit.TestCase):
- def setUp(self):
- self.router = wsgi.RoutersBase()
- super(RouterTest, self).setUp()
-
- def test_invalid_status(self):
- fake_mapper = uuid.uuid4().hex
- fake_controller = uuid.uuid4().hex
- fake_path = uuid.uuid4().hex
- fake_rel = uuid.uuid4().hex
- self.assertRaises(exception.Error,
- self.router._add_resource,
- fake_mapper, fake_controller, fake_path, fake_rel,
- status=uuid.uuid4().hex)
-
-
-class BaseWSGITest(unit.TestCase):
- def setUp(self):
- self.app = FakeApp()
- super(BaseWSGITest, self).setUp()
-
- def _make_request(self, url='/'):
- req = webob.Request.blank(url)
- args = {'action': 'index', 'controller': None}
- req.environ['wsgiorg.routing_args'] = [None, args]
- return req
-
-
-class ApplicationTest(BaseWSGITest):
- def test_response_content_type(self):
- req = self._make_request()
- resp = req.get_response(self.app)
- self.assertEqual('application/json', resp.content_type)
-
- def test_query_string_available(self):
- class FakeApp(wsgi.Application):
- def index(self, context):
- return context['query_string']
- req = self._make_request(url='/?1=2')
- resp = req.get_response(FakeApp())
- self.assertEqual({'1': '2'}, jsonutils.loads(resp.body))
-
- def test_headers_available(self):
- class FakeApp(wsgi.Application):
- def index(self, context):
- return context['headers']
-
- app = FakeApp()
- req = self._make_request(url='/?1=2')
- req.headers['X-Foo'] = "bar"
- resp = req.get_response(app)
- self.assertIn('X-Foo', eval(resp.body))
-
- def test_render_response(self):
- data = {'attribute': 'value'}
- body = b'{"attribute": "value"}'
-
- resp = wsgi.render_response(body=data)
- self.assertEqual('200 OK', resp.status)
- self.assertEqual(http_client.OK, resp.status_int)
- self.assertEqual(body, resp.body)
- self.assertEqual('X-Auth-Token', resp.headers.get('Vary'))
- self.assertEqual(str(len(body)), resp.headers.get('Content-Length'))
-
- def test_render_response_custom_status(self):
- resp = wsgi.render_response(
- status=(http_client.NOT_IMPLEMENTED, 'Not Implemented'))
- self.assertEqual('501 Not Implemented', resp.status)
- self.assertEqual(http_client.NOT_IMPLEMENTED, resp.status_int)
-
- def test_successful_require_attribute(self):
- app = FakeAttributeCheckerApp()
- req = self._make_request(url='/?1=2')
- resp = req.get_response(app)
- app.assert_attribute(resp.body, '1')
-
- def test_require_attribute_fail_if_attribute_not_present(self):
- app = FakeAttributeCheckerApp()
- req = self._make_request(url='/?1=2')
- resp = req.get_response(app)
- self.assertRaises(exception.ValidationError,
- app.assert_attribute, resp.body, 'a')
-
- def test_successful_require_multiple_attributes(self):
- app = FakeAttributeCheckerApp()
- req = self._make_request(url='/?a=1&b=2')
- resp = req.get_response(app)
- app.assert_attributes(resp.body, ['a', 'b'])
-
- def test_attribute_missing_from_request(self):
- app = FakeAttributeCheckerApp()
- req = self._make_request(url='/?a=1&b=2')
- resp = req.get_response(app)
- ex = self.assertRaises(exception.ValidationError,
- app.assert_attributes,
- resp.body, ['a', 'missing_attribute'])
- self.assertThat(six.text_type(ex),
- matchers.Contains('missing_attribute'))
-
- def test_no_required_attributes_present(self):
- app = FakeAttributeCheckerApp()
- req = self._make_request(url='/')
- resp = req.get_response(app)
-
- ex = self.assertRaises(exception.ValidationError,
- app.assert_attributes, resp.body,
- ['missing_attribute1', 'missing_attribute2'])
- self.assertThat(six.text_type(ex),
- matchers.Contains('missing_attribute1'))
- self.assertThat(six.text_type(ex),
- matchers.Contains('missing_attribute2'))
-
- def test_render_response_custom_headers(self):
- resp = wsgi.render_response(headers=[('Custom-Header', 'Some-Value')])
- self.assertEqual('Some-Value', resp.headers.get('Custom-Header'))
- self.assertEqual('X-Auth-Token', resp.headers.get('Vary'))
-
- def test_render_response_non_str_headers_converted(self):
- resp = wsgi.render_response(
- headers=[('Byte-Header', 'Byte-Value'),
- (u'Unicode-Header', u'Unicode-Value')])
- # assert that all headers are identified.
- self.assertThat(resp.headers, matchers.HasLength(4))
- self.assertEqual('Unicode-Value', resp.headers.get('Unicode-Header'))
- # assert that unicode value is converted, the expected type is str
- # on both python2 and python3.
- self.assertEqual(str,
- type(resp.headers.get('Unicode-Header')))
-
- def test_render_response_no_body(self):
- resp = wsgi.render_response()
- self.assertEqual('204 No Content', resp.status)
- self.assertEqual(http_client.NO_CONTENT, resp.status_int)
- self.assertEqual(b'', resp.body)
- self.assertEqual('0', resp.headers.get('Content-Length'))
- self.assertIsNone(resp.headers.get('Content-Type'))
-
- def test_render_response_head_with_body(self):
- resp = wsgi.render_response({'id': uuid.uuid4().hex}, method='HEAD')
- self.assertEqual(http_client.OK, resp.status_int)
- self.assertEqual(b'', resp.body)
- self.assertNotEqual('0', resp.headers.get('Content-Length'))
- self.assertEqual('application/json', resp.headers.get('Content-Type'))
-
- def test_application_local_config(self):
- class FakeApp(wsgi.Application):
- def __init__(self, *args, **kwargs):
- self.kwargs = kwargs
-
- app = FakeApp.factory({}, testkey="test")
- self.assertIn("testkey", app.kwargs)
- self.assertEqual("test", app.kwargs["testkey"])
-
- def test_render_exception(self):
- e = exception.Unauthorized(message=u'\u7f51\u7edc')
- resp = wsgi.render_exception(e)
- self.assertEqual(http_client.UNAUTHORIZED, resp.status_int)
-
- def test_render_exception_host(self):
- e = exception.Unauthorized(message=u'\u7f51\u7edc')
- req = self._make_request(url='/')
- context = {'host_url': 'http://%s:5000' % uuid.uuid4().hex,
- 'environment': req.environ}
- resp = wsgi.render_exception(e, context=context)
-
- self.assertEqual(http_client.UNAUTHORIZED, resp.status_int)
-
- def test_improperly_encoded_params(self):
- class FakeApp(wsgi.Application):
- def index(self, context):
- return context['query_string']
- # this is high bit set ASCII, copy & pasted from Windows.
- # aka code page 1252. It is not valid UTF8.
- req = self._make_request(url='/?name=nonexit%E8nt')
- self.assertRaises(exception.ValidationError, req.get_response,
- FakeApp())
-
- def test_properly_encoded_params(self):
- class FakeApp(wsgi.Application):
- def index(self, context):
- return context['query_string']
- # nonexitènt encoded as UTF-8
- req = self._make_request(url='/?name=nonexit%C3%A8nt')
- resp = req.get_response(FakeApp())
- self.assertEqual({'name': u'nonexit\xe8nt'},
- jsonutils.loads(resp.body))
-
- def test_base_url(self):
- class FakeApp(wsgi.Application):
- def index(self, context):
- return self.base_url(context, 'public')
- req = self._make_request(url='/')
- # NOTE(gyee): according to wsgiref, if HTTP_HOST is present in the
- # request environment, it will be used to construct the base url.
- # SERVER_NAME and SERVER_PORT will be ignored. These are standard
- # WSGI environment variables populated by the webserver.
- req.environ.update({
- 'SCRIPT_NAME': '/identity',
- 'SERVER_NAME': '1.2.3.4',
- 'wsgi.url_scheme': 'http',
- 'SERVER_PORT': '80',
- 'HTTP_HOST': '1.2.3.4',
- })
- resp = req.get_response(FakeApp())
- self.assertEqual(b"http://1.2.3.4/identity", resp.body)
-
- # if HTTP_HOST is absent, SERVER_NAME and SERVER_PORT will be used
- req = self._make_request(url='/')
- del req.environ['HTTP_HOST']
- req.environ.update({
- 'SCRIPT_NAME': '/identity',
- 'SERVER_NAME': '1.1.1.1',
- 'wsgi.url_scheme': 'http',
- 'SERVER_PORT': '1234',
- })
- resp = req.get_response(FakeApp())
- self.assertEqual(b"http://1.1.1.1:1234/identity", resp.body)
-
- # make sure keystone normalize the standard HTTP port 80 by stripping
- # it
- req = self._make_request(url='/')
- req.environ.update({'HTTP_HOST': 'foo:80',
- 'SCRIPT_NAME': '/identity'})
- resp = req.get_response(FakeApp())
- self.assertEqual(b"http://foo/identity", resp.body)
-
- # make sure keystone normalize the standard HTTPS port 443 by stripping
- # it
- req = self._make_request(url='/')
- req.environ.update({'HTTP_HOST': 'foo:443',
- 'SCRIPT_NAME': '/identity',
- 'wsgi.url_scheme': 'https'})
- resp = req.get_response(FakeApp())
- self.assertEqual(b"https://foo/identity", resp.body)
-
- # make sure non-standard port is preserved
- req = self._make_request(url='/')
- req.environ.update({'HTTP_HOST': 'foo:1234',
- 'SCRIPT_NAME': '/identity'})
- resp = req.get_response(FakeApp())
- self.assertEqual(b"http://foo:1234/identity", resp.body)
-
- # make sure version portion of the SCRIPT_NAME, '/v2.0', is stripped
- # from base url
- req = self._make_request(url='/')
- req.environ.update({'HTTP_HOST': 'foo:80',
- 'SCRIPT_NAME': '/bar/identity/v2.0'})
- resp = req.get_response(FakeApp())
- self.assertEqual(b"http://foo/bar/identity", resp.body)
-
- # make sure version portion of the SCRIPT_NAME, '/v3' is stripped from
- # base url
- req = self._make_request(url='/')
- req.environ.update({'HTTP_HOST': 'foo:80',
- 'SCRIPT_NAME': '/identity/v3'})
- resp = req.get_response(FakeApp())
- self.assertEqual(b"http://foo/identity", resp.body)
-
-
-class ExtensionRouterTest(BaseWSGITest):
- def test_extensionrouter_local_config(self):
- class FakeRouter(wsgi.ExtensionRouter):
- def __init__(self, *args, **kwargs):
- self.kwargs = kwargs
-
- factory = FakeRouter.factory({}, testkey="test")
- app = factory(self.app)
- self.assertIn("testkey", app.kwargs)
- self.assertEqual("test", app.kwargs["testkey"])
-
-
-class MiddlewareTest(BaseWSGITest):
- def test_middleware_request(self):
- class FakeMiddleware(wsgi.Middleware):
- def process_request(self, req):
- req.environ['fake_request'] = True
- return req
- req = self._make_request()
- resp = FakeMiddleware(None)(req)
- self.assertIn('fake_request', resp.environ)
-
- def test_middleware_response(self):
- class FakeMiddleware(wsgi.Middleware):
- def process_response(self, request, response):
- response.environ = {}
- response.environ['fake_response'] = True
- return response
- req = self._make_request()
- resp = FakeMiddleware(self.app)(req)
- self.assertIn('fake_response', resp.environ)
-
- def test_middleware_bad_request(self):
- class FakeMiddleware(wsgi.Middleware):
- def process_response(self, request, response):
- raise exception.Unauthorized()
-
- req = self._make_request()
- req.environ['REMOTE_ADDR'] = '127.0.0.1'
- resp = FakeMiddleware(self.app)(req)
- self.assertEqual(exception.Unauthorized.code, resp.status_int)
-
- def test_middleware_type_error(self):
- class FakeMiddleware(wsgi.Middleware):
- def process_response(self, request, response):
- raise TypeError()
-
- req = self._make_request()
- req.environ['REMOTE_ADDR'] = '127.0.0.1'
- resp = FakeMiddleware(self.app)(req)
- # This is a validationerror type
- self.assertEqual(exception.ValidationError.code, resp.status_int)
-
- def test_middleware_exception_error(self):
-
- exception_str = b'EXCEPTIONERROR'
-
- class FakeMiddleware(wsgi.Middleware):
- def process_response(self, request, response):
- raise exception.UnexpectedError(exception_str)
-
- def do_request():
- req = self._make_request()
- resp = FakeMiddleware(self.app)(req)
- self.assertEqual(exception.UnexpectedError.code, resp.status_int)
- return resp
-
- # Exception data should not be in the message when insecure_debug is
- # False
- self.config_fixture.config(debug=False, insecure_debug=False)
- self.assertNotIn(exception_str, do_request().body)
-
- # Exception data should be in the message when insecure_debug is True
- self.config_fixture.config(debug=True, insecure_debug=True)
- self.assertIn(exception_str, do_request().body)
-
-
-class LocalizedResponseTest(unit.TestCase):
- def test_request_match_default(self):
- # The default language if no Accept-Language is provided is None
- req = webob.Request.blank('/')
- self.assertIsNone(wsgi.best_match_language(req))
-
- @mock.patch.object(oslo_i18n, 'get_available_languages')
- def test_request_match_language_expected(self, mock_gal):
- # If Accept-Language is a supported language, best_match_language()
- # returns it.
-
- language = uuid.uuid4().hex
- mock_gal.return_value = [language]
-
- req = webob.Request.blank('/', headers={'Accept-Language': language})
- self.assertEqual(language, wsgi.best_match_language(req))
-
- @mock.patch.object(oslo_i18n, 'get_available_languages')
- def test_request_match_language_unexpected(self, mock_gal):
- # If Accept-Language is a language we do not support,
- # best_match_language() returns None.
-
- supported_language = uuid.uuid4().hex
- mock_gal.return_value = [supported_language]
-
- request_language = uuid.uuid4().hex
- req = webob.Request.blank(
- '/', headers={'Accept-Language': request_language})
- self.assertIsNone(wsgi.best_match_language(req))
-
- def test_static_translated_string_is_lazy_translatable(self):
- # Statically created message strings are an object that can get
- # lazy-translated rather than a regular string.
- self.assertNotEqual(six.text_type,
- type(exception.Unauthorized.message_format))
-
- @mock.patch.object(oslo_i18n, 'get_available_languages')
- def test_get_localized_response(self, mock_gal):
- # If the request has the Accept-Language set to a supported language
- # and an exception is raised by the application that is translatable
- # then the response will have the translated message.
-
- language = uuid.uuid4().hex
- mock_gal.return_value = [language]
-
- # The arguments for the xlated message format have to match the args
- # for the chosen exception (exception.NotFound)
- xlated_msg_fmt = "Xlated NotFound, %(target)s."
-
- # Fake out gettext.translation() to return a translator for our
- # expected language and a passthrough translator for other langs.
-
- def fake_translation(*args, **kwargs):
- class IdentityTranslator(object):
- def ugettext(self, msgid):
- return msgid
-
- gettext = ugettext
-
- class LangTranslator(object):
- def ugettext(self, msgid):
- if msgid == exception.NotFound.message_format:
- return xlated_msg_fmt
- return msgid
-
- gettext = ugettext
-
- if language in kwargs.get('languages', []):
- return LangTranslator()
- return IdentityTranslator()
-
- with mock.patch.object(gettext, 'translation',
- side_effect=fake_translation) as xlation_mock:
- target = uuid.uuid4().hex
-
- # Fake app raises NotFound exception to simulate Keystone raising.
-
- class FakeApp(wsgi.Application):
- def index(self, context):
- raise exception.NotFound(target=target)
-
- # Make the request with Accept-Language on the app, expect an error
- # response with the translated message.
-
- req = webob.Request.blank('/')
- args = {'action': 'index', 'controller': None}
- req.environ['wsgiorg.routing_args'] = [None, args]
- req.headers['Accept-Language'] = language
- resp = req.get_response(FakeApp())
-
- # Assert that the translated message appears in the response.
-
- exp_msg = xlated_msg_fmt % dict(target=target)
- self.assertThat(resp.json['error']['message'],
- matchers.Equals(exp_msg))
- self.assertThat(xlation_mock.called, matchers.Equals(True))
-
-
-class ServerTest(unit.TestCase):
-
- def setUp(self):
- super(ServerTest, self).setUp()
- self.host = '127.0.0.1'
- self.port = '1234'
-
- @mock.patch('eventlet.listen')
- @mock.patch('socket.getaddrinfo')
- def test_keepalive_unset(self, mock_getaddrinfo, mock_listen):
- mock_getaddrinfo.return_value = [(1, 2, 3, 4, 5)]
- mock_sock_dup = mock_listen.return_value.dup.return_value
-
- server = environment.Server(mock.MagicMock(), host=self.host,
- port=self.port)
- server.start()
- self.addCleanup(server.stop)
- self.assertTrue(mock_listen.called)
- self.assertFalse(mock_sock_dup.setsockopt.called)
-
- @mock.patch('eventlet.listen')
- @mock.patch('socket.getaddrinfo')
- def test_keepalive_set(self, mock_getaddrinfo, mock_listen):
- mock_getaddrinfo.return_value = [(1, 2, 3, 4, 5)]
- mock_sock_dup = mock_listen.return_value.dup.return_value
-
- server = environment.Server(mock.MagicMock(), host=self.host,
- port=self.port, keepalive=True)
- server.start()
- self.addCleanup(server.stop)
- mock_sock_dup.setsockopt.assert_called_once_with(socket.SOL_SOCKET,
- socket.SO_KEEPALIVE,
- 1)
- self.assertTrue(mock_listen.called)
-
- @mock.patch('eventlet.listen')
- @mock.patch('socket.getaddrinfo')
- def test_keepalive_and_keepidle_set(self, mock_getaddrinfo, mock_listen):
- mock_getaddrinfo.return_value = [(1, 2, 3, 4, 5)]
- mock_sock_dup = mock_listen.return_value.dup.return_value
-
- server = environment.Server(mock.MagicMock(), host=self.host,
- port=self.port, keepalive=True,
- keepidle=1)
- server.start()
- self.addCleanup(server.stop)
-
- if hasattr(socket, 'TCP_KEEPIDLE'):
- self.assertEqual(2, mock_sock_dup.setsockopt.call_count)
- # Test the last set of call args i.e. for the keepidle
- mock_sock_dup.setsockopt.assert_called_with(socket.IPPROTO_TCP,
- socket.TCP_KEEPIDLE,
- 1)
- else:
- self.assertEqual(1, mock_sock_dup.setsockopt.call_count)
-
- self.assertTrue(mock_listen.called)
-
- def test_client_socket_timeout(self):
- # mocking server method of eventlet.wsgi to check it is called with
- # configured 'client_socket_timeout' value.
- for socket_timeout in range(1, 10):
- self.config_fixture.config(group='eventlet_server',
- client_socket_timeout=socket_timeout)
- server = environment.Server(mock.MagicMock(), host=self.host,
- port=self.port)
- with mock.patch.object(eventlet.wsgi, 'server') as mock_server:
- fake_application = uuid.uuid4().hex
- fake_socket = uuid.uuid4().hex
- server._run(fake_application, fake_socket)
- mock_server.assert_called_once_with(
- fake_socket,
- fake_application,
- debug=mock.ANY,
- socket_timeout=socket_timeout,
- log=mock.ANY,
- keepalive=mock.ANY)
-
- def test_wsgi_keep_alive(self):
- # mocking server method of eventlet.wsgi to check it is called with
- # configured 'wsgi_keep_alive' value.
- wsgi_keepalive = False
- self.config_fixture.config(group='eventlet_server',
- wsgi_keep_alive=wsgi_keepalive)
-
- server = environment.Server(mock.MagicMock(), host=self.host,
- port=self.port)
- with mock.patch.object(eventlet.wsgi, 'server') as mock_server:
- fake_application = uuid.uuid4().hex
- fake_socket = uuid.uuid4().hex
- server._run(fake_application, fake_socket)
- mock_server.assert_called_once_with(fake_socket,
- fake_application,
- debug=mock.ANY,
- socket_timeout=mock.ANY,
- log=mock.ANY,
- keepalive=wsgi_keepalive)
diff --git a/keystone-moon/keystone/tests/unit/tests/__init__.py b/keystone-moon/keystone/tests/unit/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/tests/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/tests/test_core.py b/keystone-moon/keystone/tests/unit/tests/test_core.py
deleted file mode 100644
index 56e42bcc..00000000
--- a/keystone-moon/keystone/tests/unit/tests/test_core.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2014 IBM Corp.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import sys
-import warnings
-
-from oslo_log import log
-from sqlalchemy import exc
-from testtools import matchers
-
-from keystone.tests import unit
-
-
-LOG = log.getLogger(__name__)
-
-
-class BaseTestTestCase(unit.BaseTestCase):
-
- def test_unexpected_exit(self):
- # if a test calls sys.exit it raises rather than exiting.
- self.assertThat(lambda: sys.exit(),
- matchers.raises(unit.UnexpectedExit))
-
-
-class TestTestCase(unit.TestCase):
-
- def test_bad_log(self):
- # If the arguments are invalid for the string in a log it raises an
- # exception during testing.
- self.assertThat(
- lambda: LOG.warning('String %(p1)s %(p2)s', {'p1': 'something'}),
- matchers.raises(KeyError))
-
- def test_sa_warning(self):
- self.assertThat(
- lambda: warnings.warn('test sa warning error', exc.SAWarning),
- matchers.raises(exc.SAWarning))
-
- def test_deprecation_warnings_are_raised_as_exceptions_in_tests(self):
- self.assertThat(
- lambda: warnings.warn('this is deprecated', DeprecationWarning),
- matchers.raises(DeprecationWarning))
diff --git a/keystone-moon/keystone/tests/unit/tests/test_utils.py b/keystone-moon/keystone/tests/unit/tests/test_utils.py
deleted file mode 100644
index 22c485c0..00000000
--- a/keystone-moon/keystone/tests/unit/tests/test_utils.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from testtools import matchers
-from testtools import testcase
-
-from keystone.tests.unit import utils
-
-
-class TestWipDecorator(testcase.TestCase):
-
- def test_raises_SkipError_when_broken_test_fails(self):
-
- @utils.wip('waiting on bug #000000')
- def test():
- raise Exception('i expected a failure - this is a WIP')
-
- e = self.assertRaises(testcase.TestSkipped, test)
- self.assertThat(str(e), matchers.Contains('#000000'))
-
- def test_raises_AssertionError_when_test_passes(self):
-
- @utils.wip('waiting on bug #000000')
- def test():
- pass # literally
-
- e = self.assertRaises(AssertionError, test)
- self.assertThat(str(e), matchers.Contains('#000000'))
diff --git a/keystone-moon/keystone/tests/unit/token/__init__.py b/keystone-moon/keystone/tests/unit/token/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/token/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/token/test_backends.py b/keystone-moon/keystone/tests/unit/token/test_backends.py
deleted file mode 100644
index feb7e017..00000000
--- a/keystone-moon/keystone/tests/unit/token/test_backends.py
+++ /dev/null
@@ -1,551 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import datetime
-import hashlib
-import uuid
-
-from keystoneclient.common import cms
-from oslo_config import cfg
-from oslo_utils import timeutils
-import six
-from six.moves import range
-
-from keystone import exception
-from keystone.tests import unit
-from keystone.tests.unit import utils as test_utils
-from keystone.token import provider
-
-
-CONF = cfg.CONF
-NULL_OBJECT = object()
-
-
-class TokenTests(object):
- def _create_token_id(self):
- # Use a token signed by the cms module
- token_id = ""
- for i in range(1, 20):
- token_id += uuid.uuid4().hex
- return cms.cms_sign_token(token_id,
- CONF.signing.certfile,
- CONF.signing.keyfile)
-
- def _assert_revoked_token_list_matches_token_persistence(
- self, revoked_token_id_list):
- # Assert that the list passed in matches the list returned by the
- # token persistence service
- persistence_list = [
- x['id']
- for x in self.token_provider_api.list_revoked_tokens()
- ]
- self.assertEqual(persistence_list, revoked_token_id_list)
-
- def test_token_crud(self):
- token_id = self._create_token_id()
- data = {'id': token_id, 'a': 'b',
- 'trust_id': None,
- 'user': {'id': 'testuserid'},
- 'token_data': {'access': {'token': {
- 'audit_ids': [uuid.uuid4().hex]}}}}
- data_ref = self.token_provider_api._persistence.create_token(token_id,
- data)
- expires = data_ref.pop('expires')
- data_ref.pop('user_id')
- self.assertIsInstance(expires, datetime.datetime)
- data_ref.pop('id')
- data.pop('id')
- self.assertDictEqual(data, data_ref)
-
- new_data_ref = self.token_provider_api._persistence.get_token(token_id)
- expires = new_data_ref.pop('expires')
- self.assertIsInstance(expires, datetime.datetime)
- new_data_ref.pop('user_id')
- new_data_ref.pop('id')
-
- self.assertEqual(data, new_data_ref)
-
- self.token_provider_api._persistence.delete_token(token_id)
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api._persistence.get_token, token_id)
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api._persistence.delete_token, token_id)
-
- def create_token_sample_data(self, token_id=None, tenant_id=None,
- trust_id=None, user_id=None, expires=None):
- if token_id is None:
- token_id = self._create_token_id()
- if user_id is None:
- user_id = 'testuserid'
- # FIXME(morganfainberg): These tokens look nothing like "Real" tokens.
- # This should be fixed when token issuance is cleaned up.
- data = {'id': token_id, 'a': 'b',
- 'user': {'id': user_id},
- 'access': {'token': {'audit_ids': [uuid.uuid4().hex]}}}
- if tenant_id is not None:
- data['tenant'] = {'id': tenant_id, 'name': tenant_id}
- if tenant_id is NULL_OBJECT:
- data['tenant'] = None
- if expires is not None:
- data['expires'] = expires
- if trust_id is not None:
- data['trust_id'] = trust_id
- data['access'].setdefault('trust', {})
- # Testuserid2 is used here since a trustee will be different in
- # the cases of impersonation and therefore should not match the
- # token's user_id.
- data['access']['trust']['trustee_user_id'] = 'testuserid2'
- data['token_version'] = provider.V2
- # Issue token stores a copy of all token data at token['token_data'].
- # This emulates that assumption as part of the test.
- data['token_data'] = copy.deepcopy(data)
- new_token = self.token_provider_api._persistence.create_token(token_id,
- data)
- return new_token['id'], data
-
- def test_delete_tokens(self):
- tokens = self.token_provider_api._persistence._list_tokens(
- 'testuserid')
- self.assertEqual(0, len(tokens))
- token_id1, data = self.create_token_sample_data(
- tenant_id='testtenantid')
- token_id2, data = self.create_token_sample_data(
- tenant_id='testtenantid')
- token_id3, data = self.create_token_sample_data(
- tenant_id='testtenantid',
- user_id='testuserid1')
- tokens = self.token_provider_api._persistence._list_tokens(
- 'testuserid')
- self.assertEqual(2, len(tokens))
- self.assertIn(token_id2, tokens)
- self.assertIn(token_id1, tokens)
- self.token_provider_api._persistence.delete_tokens(
- user_id='testuserid',
- tenant_id='testtenantid')
- tokens = self.token_provider_api._persistence._list_tokens(
- 'testuserid')
- self.assertEqual(0, len(tokens))
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._persistence.get_token,
- token_id1)
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._persistence.get_token,
- token_id2)
-
- self.token_provider_api._persistence.get_token(token_id3)
-
- def test_delete_tokens_trust(self):
- tokens = self.token_provider_api._persistence._list_tokens(
- user_id='testuserid')
- self.assertEqual(0, len(tokens))
- token_id1, data = self.create_token_sample_data(
- tenant_id='testtenantid',
- trust_id='testtrustid')
- token_id2, data = self.create_token_sample_data(
- tenant_id='testtenantid',
- user_id='testuserid1',
- trust_id='testtrustid1')
- tokens = self.token_provider_api._persistence._list_tokens(
- 'testuserid')
- self.assertEqual(1, len(tokens))
- self.assertIn(token_id1, tokens)
- self.token_provider_api._persistence.delete_tokens(
- user_id='testuserid',
- tenant_id='testtenantid',
- trust_id='testtrustid')
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._persistence.get_token,
- token_id1)
- self.token_provider_api._persistence.get_token(token_id2)
-
- def _test_token_list(self, token_list_fn):
- tokens = token_list_fn('testuserid')
- self.assertEqual(0, len(tokens))
- token_id1, data = self.create_token_sample_data()
- tokens = token_list_fn('testuserid')
- self.assertEqual(1, len(tokens))
- self.assertIn(token_id1, tokens)
- token_id2, data = self.create_token_sample_data()
- tokens = token_list_fn('testuserid')
- self.assertEqual(2, len(tokens))
- self.assertIn(token_id2, tokens)
- self.assertIn(token_id1, tokens)
- self.token_provider_api._persistence.delete_token(token_id1)
- tokens = token_list_fn('testuserid')
- self.assertIn(token_id2, tokens)
- self.assertNotIn(token_id1, tokens)
- self.token_provider_api._persistence.delete_token(token_id2)
- tokens = token_list_fn('testuserid')
- self.assertNotIn(token_id2, tokens)
- self.assertNotIn(token_id1, tokens)
-
- # tenant-specific tokens
- tenant1 = uuid.uuid4().hex
- tenant2 = uuid.uuid4().hex
- token_id3, data = self.create_token_sample_data(tenant_id=tenant1)
- token_id4, data = self.create_token_sample_data(tenant_id=tenant2)
- # test for existing but empty tenant (LP:1078497)
- token_id5, data = self.create_token_sample_data(tenant_id=NULL_OBJECT)
- tokens = token_list_fn('testuserid')
- self.assertEqual(3, len(tokens))
- self.assertNotIn(token_id1, tokens)
- self.assertNotIn(token_id2, tokens)
- self.assertIn(token_id3, tokens)
- self.assertIn(token_id4, tokens)
- self.assertIn(token_id5, tokens)
- tokens = token_list_fn('testuserid', tenant2)
- self.assertEqual(1, len(tokens))
- self.assertNotIn(token_id1, tokens)
- self.assertNotIn(token_id2, tokens)
- self.assertNotIn(token_id3, tokens)
- self.assertIn(token_id4, tokens)
-
- def test_token_list(self):
- self._test_token_list(
- self.token_provider_api._persistence._list_tokens)
-
- def test_token_list_trust(self):
- trust_id = uuid.uuid4().hex
- token_id5, data = self.create_token_sample_data(trust_id=trust_id)
- tokens = self.token_provider_api._persistence._list_tokens(
- 'testuserid', trust_id=trust_id)
- self.assertEqual(1, len(tokens))
- self.assertIn(token_id5, tokens)
-
- def test_get_token_returns_not_found(self):
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._persistence.get_token,
- uuid.uuid4().hex)
-
- def test_delete_token_returns_not_found(self):
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._persistence.delete_token,
- uuid.uuid4().hex)
-
- def test_expired_token(self):
- token_id = uuid.uuid4().hex
- expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
- data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
- 'expires': expire_time,
- 'trust_id': None,
- 'user': {'id': 'testuserid'}}
- data_ref = self.token_provider_api._persistence.create_token(token_id,
- data)
- data_ref.pop('user_id')
- self.assertDictEqual(data, data_ref)
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._persistence.get_token,
- token_id)
-
- def test_null_expires_token(self):
- token_id = uuid.uuid4().hex
- data = {'id': token_id, 'id_hash': token_id, 'a': 'b', 'expires': None,
- 'user': {'id': 'testuserid'}}
- data_ref = self.token_provider_api._persistence.create_token(token_id,
- data)
- self.assertIsNotNone(data_ref['expires'])
- new_data_ref = self.token_provider_api._persistence.get_token(token_id)
-
- # MySQL doesn't store microseconds, so discard them before testing
- data_ref['expires'] = data_ref['expires'].replace(microsecond=0)
- new_data_ref['expires'] = new_data_ref['expires'].replace(
- microsecond=0)
-
- self.assertEqual(data_ref, new_data_ref)
-
- def check_list_revoked_tokens(self, token_infos):
- revocation_list = self.token_provider_api.list_revoked_tokens()
- revoked_ids = [x['id'] for x in revocation_list]
- revoked_audit_ids = [x['audit_id'] for x in revocation_list]
- self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
- for token_id, audit_id in token_infos:
- self.assertIn(token_id, revoked_ids)
- self.assertIn(audit_id, revoked_audit_ids)
-
- def delete_token(self):
- token_id = uuid.uuid4().hex
- audit_id = uuid.uuid4().hex
- data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
- 'user': {'id': 'testuserid'},
- 'token_data': {'token': {'audit_ids': [audit_id]}}}
- data_ref = self.token_provider_api._persistence.create_token(token_id,
- data)
- self.token_provider_api._persistence.delete_token(token_id)
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api._persistence.get_token,
- data_ref['id'])
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api._persistence.delete_token,
- data_ref['id'])
- return (token_id, audit_id)
-
- def test_list_revoked_tokens_returns_empty_list(self):
- revoked_ids = [x['id']
- for x in self.token_provider_api.list_revoked_tokens()]
- self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
- self.assertEqual([], revoked_ids)
-
- def test_list_revoked_tokens_for_single_token(self):
- self.check_list_revoked_tokens([self.delete_token()])
-
- def test_list_revoked_tokens_for_multiple_tokens(self):
- self.check_list_revoked_tokens([self.delete_token()
- for x in range(2)])
-
- def test_flush_expired_token(self):
- token_id = uuid.uuid4().hex
- expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
- data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
- 'expires': expire_time,
- 'trust_id': None,
- 'user': {'id': 'testuserid'}}
- data_ref = self.token_provider_api._persistence.create_token(token_id,
- data)
- data_ref.pop('user_id')
- self.assertDictEqual(data, data_ref)
-
- token_id = uuid.uuid4().hex
- expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
- data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
- 'expires': expire_time,
- 'trust_id': None,
- 'user': {'id': 'testuserid'}}
- data_ref = self.token_provider_api._persistence.create_token(token_id,
- data)
- data_ref.pop('user_id')
- self.assertDictEqual(data, data_ref)
-
- self.token_provider_api._persistence.flush_expired_tokens()
- tokens = self.token_provider_api._persistence._list_tokens(
- 'testuserid')
- self.assertEqual(1, len(tokens))
- self.assertIn(token_id, tokens)
-
- @unit.skip_if_cache_disabled('token')
- def test_revocation_list_cache(self):
- expire_time = timeutils.utcnow() + datetime.timedelta(minutes=10)
- token_id = uuid.uuid4().hex
- token_data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
- 'expires': expire_time,
- 'trust_id': None,
- 'user': {'id': 'testuserid'},
- 'token_data': {'token': {
- 'audit_ids': [uuid.uuid4().hex]}}}
- token2_id = uuid.uuid4().hex
- token2_data = {'id_hash': token2_id, 'id': token2_id, 'a': 'b',
- 'expires': expire_time,
- 'trust_id': None,
- 'user': {'id': 'testuserid'},
- 'token_data': {'token': {
- 'audit_ids': [uuid.uuid4().hex]}}}
- # Create 2 Tokens.
- self.token_provider_api._persistence.create_token(token_id,
- token_data)
- self.token_provider_api._persistence.create_token(token2_id,
- token2_data)
- # Verify the revocation list is empty.
- self.assertEqual(
- [], self.token_provider_api._persistence.list_revoked_tokens())
- self.assertEqual([], self.token_provider_api.list_revoked_tokens())
- # Delete a token directly, bypassing the manager.
- self.token_provider_api._persistence.driver.delete_token(token_id)
- # Verify the revocation list is still empty.
- self.assertEqual(
- [], self.token_provider_api._persistence.list_revoked_tokens())
- self.assertEqual([], self.token_provider_api.list_revoked_tokens())
- # Invalidate the revocation list.
- self.token_provider_api._persistence.invalidate_revocation_list()
- # Verify the deleted token is in the revocation list.
- revoked_ids = [x['id']
- for x in self.token_provider_api.list_revoked_tokens()]
- self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
- self.assertIn(token_id, revoked_ids)
- # Delete the second token, through the manager
- self.token_provider_api._persistence.delete_token(token2_id)
- revoked_ids = [x['id']
- for x in self.token_provider_api.list_revoked_tokens()]
- self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
- # Verify both tokens are in the revocation list.
- self.assertIn(token_id, revoked_ids)
- self.assertIn(token2_id, revoked_ids)
-
- def _test_predictable_revoked_pki_token_id(self, hash_fn):
- token_id = self._create_token_id()
- token_id_hash = hash_fn(token_id.encode('utf-8')).hexdigest()
- token = {'user': {'id': uuid.uuid4().hex},
- 'token_data': {'token': {'audit_ids': [uuid.uuid4().hex]}}}
-
- self.token_provider_api._persistence.create_token(token_id, token)
- self.token_provider_api._persistence.delete_token(token_id)
-
- revoked_ids = [x['id']
- for x in self.token_provider_api.list_revoked_tokens()]
- self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
- self.assertIn(token_id_hash, revoked_ids)
- self.assertNotIn(token_id, revoked_ids)
- for t in self.token_provider_api._persistence.list_revoked_tokens():
- self.assertIn('expires', t)
-
- def test_predictable_revoked_pki_token_id_default(self):
- self._test_predictable_revoked_pki_token_id(hashlib.md5)
-
- def test_predictable_revoked_pki_token_id_sha256(self):
- self.config_fixture.config(group='token', hash_algorithm='sha256')
- self._test_predictable_revoked_pki_token_id(hashlib.sha256)
-
- def test_predictable_revoked_uuid_token_id(self):
- token_id = uuid.uuid4().hex
- token = {'user': {'id': uuid.uuid4().hex},
- 'token_data': {'token': {'audit_ids': [uuid.uuid4().hex]}}}
-
- self.token_provider_api._persistence.create_token(token_id, token)
- self.token_provider_api._persistence.delete_token(token_id)
-
- revoked_tokens = self.token_provider_api.list_revoked_tokens()
- revoked_ids = [x['id'] for x in revoked_tokens]
- self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
- self.assertIn(token_id, revoked_ids)
- for t in revoked_tokens:
- self.assertIn('expires', t)
-
- def test_create_unicode_token_id(self):
- token_id = six.text_type(self._create_token_id())
- self.create_token_sample_data(token_id=token_id)
- self.token_provider_api._persistence.get_token(token_id)
-
- def test_create_unicode_user_id(self):
- user_id = six.text_type(uuid.uuid4().hex)
- token_id, data = self.create_token_sample_data(user_id=user_id)
- self.token_provider_api._persistence.get_token(token_id)
-
- def test_token_expire_timezone(self):
-
- @test_utils.timezone
- def _create_token(expire_time):
- token_id = uuid.uuid4().hex
- user_id = six.text_type(uuid.uuid4().hex)
- return self.create_token_sample_data(token_id=token_id,
- user_id=user_id,
- expires=expire_time)
-
- for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']:
- test_utils.TZ = 'UTC' + d
- expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
- token_id, data_in = _create_token(expire_time)
- data_get = self.token_provider_api._persistence.get_token(token_id)
-
- self.assertEqual(data_in['id'], data_get['id'],
- 'TZ=%s' % test_utils.TZ)
-
- expire_time_expired = (
- timeutils.utcnow() + datetime.timedelta(minutes=-1))
- token_id, data_in = _create_token(expire_time_expired)
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api._persistence.get_token,
- data_in['id'])
-
-
-class TokenCacheInvalidation(object):
- def _create_test_data(self):
- self.user = unit.new_user_ref(
- domain_id=CONF.identity.default_domain_id)
- self.tenant = unit.new_project_ref(
- domain_id=CONF.identity.default_domain_id)
-
- # Create an equivalent of a scoped token
- token_dict = {'user': self.user, 'tenant': self.tenant,
- 'metadata': {}, 'id': 'placeholder'}
- token_id, data = self.token_provider_api.issue_v2_token(token_dict)
- self.scoped_token_id = token_id
-
- # ..and an un-scoped one
- token_dict = {'user': self.user, 'tenant': None,
- 'metadata': {}, 'id': 'placeholder'}
- token_id, data = self.token_provider_api.issue_v2_token(token_dict)
- self.unscoped_token_id = token_id
-
- # Validate them, in the various ways possible - this will load the
- # responses into the token cache.
- self._check_scoped_tokens_are_valid()
- self._check_unscoped_tokens_are_valid()
-
- def _check_unscoped_tokens_are_invalid(self):
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api.validate_token,
- self.unscoped_token_id)
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- self.unscoped_token_id)
-
- def _check_scoped_tokens_are_invalid(self):
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api.validate_token,
- self.scoped_token_id)
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api.validate_token,
- self.scoped_token_id,
- self.tenant['id'])
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- self.scoped_token_id)
- self.assertRaises(
- exception.TokenNotFound,
- self.token_provider_api.validate_v2_token,
- self.scoped_token_id,
- self.tenant['id'])
-
- def _check_scoped_tokens_are_valid(self):
- self.token_provider_api.validate_token(self.scoped_token_id)
- self.token_provider_api.validate_token(
- self.scoped_token_id, belongs_to=self.tenant['id'])
- self.token_provider_api.validate_v2_token(self.scoped_token_id)
- self.token_provider_api.validate_v2_token(
- self.scoped_token_id, belongs_to=self.tenant['id'])
-
- def _check_unscoped_tokens_are_valid(self):
- self.token_provider_api.validate_token(self.unscoped_token_id)
- self.token_provider_api.validate_v2_token(self.unscoped_token_id)
-
- def test_delete_unscoped_token(self):
- self.token_provider_api._persistence.delete_token(
- self.unscoped_token_id)
- self._check_unscoped_tokens_are_invalid()
- self._check_scoped_tokens_are_valid()
-
- def test_delete_scoped_token_by_id(self):
- self.token_provider_api._persistence.delete_token(self.scoped_token_id)
- self._check_scoped_tokens_are_invalid()
- self._check_unscoped_tokens_are_valid()
-
- def test_delete_scoped_token_by_user(self):
- self.token_provider_api._persistence.delete_tokens(self.user['id'])
- # Since we are deleting all tokens for this user, they should all
- # now be invalid.
- self._check_scoped_tokens_are_invalid()
- self._check_unscoped_tokens_are_invalid()
-
- def test_delete_scoped_token_by_user_and_tenant(self):
- self.token_provider_api._persistence.delete_tokens(
- self.user['id'],
- tenant_id=self.tenant['id'])
- self._check_scoped_tokens_are_invalid()
- self._check_unscoped_tokens_are_valid()
diff --git a/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py b/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
deleted file mode 100644
index 5f51d7b3..00000000
--- a/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
+++ /dev/null
@@ -1,611 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import base64
-import datetime
-import hashlib
-import os
-import uuid
-
-import msgpack
-from oslo_utils import timeutils
-from six.moves import urllib
-
-from keystone.common import config
-from keystone.common import utils
-from keystone import exception
-from keystone.federation import constants as federation_constants
-from keystone.tests import unit
-from keystone.tests.unit import ksfixtures
-from keystone.tests.unit.ksfixtures import database
-from keystone.token import provider
-from keystone.token.providers import fernet
-from keystone.token.providers.fernet import token_formatters
-from keystone.token.providers.fernet import utils as fernet_utils
-
-
-CONF = config.CONF
-
-
-class TestFernetTokenProvider(unit.TestCase):
- def setUp(self):
- super(TestFernetTokenProvider, self).setUp()
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
- self.provider = fernet.Provider()
-
- def test_supports_bind_authentication_returns_false(self):
- self.assertFalse(self.provider._supports_bind_authentication)
-
- def test_needs_persistence_returns_false(self):
- self.assertFalse(self.provider.needs_persistence())
-
- def test_invalid_v3_token_raises_token_not_found(self):
- # NOTE(lbragstad): Here we use the validate_non_persistent_token()
- # methods because the validate_v3_token() method is strictly for
- # validating UUID formatted tokens. It is written to assume cached
- # tokens from a backend, where validate_non_persistent_token() is not.
- token_id = uuid.uuid4().hex
- e = self.assertRaises(
- exception.TokenNotFound,
- self.provider.validate_non_persistent_token,
- token_id)
- self.assertIn(token_id, u'%s' % e)
-
- def test_invalid_v2_token_raises_token_not_found(self):
- token_id = uuid.uuid4().hex
- e = self.assertRaises(
- exception.TokenNotFound,
- self.provider.validate_non_persistent_token,
- token_id)
- self.assertIn(token_id, u'%s' % e)
-
-
-class TestValidate(unit.TestCase):
- def setUp(self):
- super(TestValidate, self).setUp()
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
- self.useFixture(database.Database())
- self.load_backends()
-
- def config_overrides(self):
- super(TestValidate, self).config_overrides()
- self.config_fixture.config(group='token', provider='fernet')
-
- def test_validate_v3_token_simple(self):
- # Check the fields in the token result when use validate_v3_token
- # with a simple token.
-
- domain_ref = unit.new_domain_ref()
- domain_ref = self.resource_api.create_domain(domain_ref['id'],
- domain_ref)
-
- user_ref = unit.new_user_ref(domain_ref['id'])
- user_ref = self.identity_api.create_user(user_ref)
-
- method_names = ['password']
- token_id, token_data_ = self.token_provider_api.issue_v3_token(
- user_ref['id'], method_names)
-
- token_data = self.token_provider_api.validate_v3_token(token_id)
- token = token_data['token']
- self.assertIsInstance(token['audit_ids'], list)
- self.assertIsInstance(token['expires_at'], str)
- self.assertIsInstance(token['issued_at'], str)
- self.assertEqual(method_names, token['methods'])
- exp_user_info = {
- 'id': user_ref['id'],
- 'name': user_ref['name'],
- 'domain': {
- 'id': domain_ref['id'],
- 'name': domain_ref['name'],
- },
- }
- self.assertEqual(exp_user_info, token['user'])
-
- def test_validate_v3_token_federated_info(self):
- # Check the user fields in the token result when use validate_v3_token
- # when the token has federated info.
-
- domain_ref = unit.new_domain_ref()
- domain_ref = self.resource_api.create_domain(domain_ref['id'],
- domain_ref)
-
- user_ref = unit.new_user_ref(domain_ref['id'])
- user_ref = self.identity_api.create_user(user_ref)
-
- method_names = ['mapped']
-
- group_ids = [uuid.uuid4().hex, ]
- identity_provider = uuid.uuid4().hex
- protocol = uuid.uuid4().hex
- auth_context = {
- 'user_id': user_ref['id'],
- 'group_ids': group_ids,
- federation_constants.IDENTITY_PROVIDER: identity_provider,
- federation_constants.PROTOCOL: protocol,
- }
- token_id, token_data_ = self.token_provider_api.issue_v3_token(
- user_ref['id'], method_names, auth_context=auth_context)
-
- token_data = self.token_provider_api.validate_v3_token(token_id)
- token = token_data['token']
- exp_user_info = {
- 'id': user_ref['id'],
- 'name': user_ref['id'],
- 'domain': {'id': CONF.federation.federated_domain_name,
- 'name': CONF.federation.federated_domain_name, },
- federation_constants.FEDERATION: {
- 'groups': [{'id': group_id} for group_id in group_ids],
- 'identity_provider': {'id': identity_provider, },
- 'protocol': {'id': protocol, },
- },
- }
- self.assertEqual(exp_user_info, token['user'])
-
- def test_validate_v3_token_trust(self):
- # Check the trust fields in the token result when use validate_v3_token
- # when the token has trust info.
-
- domain_ref = unit.new_domain_ref()
- domain_ref = self.resource_api.create_domain(domain_ref['id'],
- domain_ref)
-
- user_ref = unit.new_user_ref(domain_ref['id'])
- user_ref = self.identity_api.create_user(user_ref)
-
- trustor_user_ref = unit.new_user_ref(domain_ref['id'])
- trustor_user_ref = self.identity_api.create_user(trustor_user_ref)
-
- project_ref = unit.new_project_ref(domain_id=domain_ref['id'])
- project_ref = self.resource_api.create_project(project_ref['id'],
- project_ref)
-
- role_ref = unit.new_role_ref()
- role_ref = self.role_api.create_role(role_ref['id'], role_ref)
-
- self.assignment_api.create_grant(
- role_ref['id'], user_id=user_ref['id'],
- project_id=project_ref['id'])
-
- self.assignment_api.create_grant(
- role_ref['id'], user_id=trustor_user_ref['id'],
- project_id=project_ref['id'])
-
- trustor_user_id = trustor_user_ref['id']
- trustee_user_id = user_ref['id']
- trust_ref = unit.new_trust_ref(
- trustor_user_id, trustee_user_id, project_id=project_ref['id'],
- role_ids=[role_ref['id'], ])
- trust_ref = self.trust_api.create_trust(trust_ref['id'], trust_ref,
- trust_ref['roles'])
-
- method_names = ['password']
-
- token_id, token_data_ = self.token_provider_api.issue_v3_token(
- user_ref['id'], method_names, project_id=project_ref['id'],
- trust=trust_ref)
-
- token_data = self.token_provider_api.validate_v3_token(token_id)
- token = token_data['token']
- exp_trust_info = {
- 'id': trust_ref['id'],
- 'impersonation': False,
- 'trustee_user': {'id': user_ref['id'], },
- 'trustor_user': {'id': trustor_user_ref['id'], },
- }
- self.assertEqual(exp_trust_info, token['OS-TRUST:trust'])
-
- def test_validate_v3_token_validation_error_exc(self):
- # When the token format isn't recognized, TokenNotFound is raised.
-
- # A uuid string isn't a valid Fernet token.
- token_id = uuid.uuid4().hex
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_v3_token, token_id)
-
-
-class TestTokenFormatter(unit.TestCase):
- def setUp(self):
- super(TestTokenFormatter, self).setUp()
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
- def test_restore_padding(self):
- # 'a' will result in '==' padding, 'aa' will result in '=' padding, and
- # 'aaa' will result in no padding.
- binary_to_test = [b'a', b'aa', b'aaa']
-
- for binary in binary_to_test:
- # base64.urlsafe_b64encode takes six.binary_type and returns
- # six.binary_type.
- encoded_string = base64.urlsafe_b64encode(binary)
- encoded_string = encoded_string.decode('utf-8')
- # encoded_string is now six.text_type.
- encoded_str_without_padding = encoded_string.rstrip('=')
- self.assertFalse(encoded_str_without_padding.endswith('='))
- encoded_str_with_padding_restored = (
- token_formatters.TokenFormatter.restore_padding(
- encoded_str_without_padding)
- )
- self.assertEqual(encoded_string, encoded_str_with_padding_restored)
-
- def test_legacy_padding_validation(self):
- first_value = uuid.uuid4().hex
- second_value = uuid.uuid4().hex
- payload = (first_value, second_value)
- msgpack_payload = msgpack.packb(payload)
- # msgpack_payload is six.binary_type.
-
- tf = token_formatters.TokenFormatter()
-
- # NOTE(lbragstad): This method preserves the way that keystone used to
- # percent encode the tokens, prior to bug #1491926.
- def legacy_pack(payload):
- # payload is six.binary_type.
- encrypted_payload = tf.crypto.encrypt(payload)
- # encrypted_payload is six.binary_type.
-
- # the encrypted_payload is returned with padding appended
- self.assertTrue(encrypted_payload.endswith(b'='))
-
- # using urllib.parse.quote will percent encode the padding, like
- # keystone did in Kilo.
- percent_encoded_payload = urllib.parse.quote(encrypted_payload)
- # percent_encoded_payload is six.text_type.
-
- # ensure that the padding was actually percent encoded
- self.assertTrue(percent_encoded_payload.endswith('%3D'))
- return percent_encoded_payload
-
- token_with_legacy_padding = legacy_pack(msgpack_payload)
- # token_with_legacy_padding is six.text_type.
-
- # demonstrate the we can validate a payload that has been percent
- # encoded with the Fernet logic that existed in Kilo
- serialized_payload = tf.unpack(token_with_legacy_padding)
- # serialized_payload is six.binary_type.
- returned_payload = msgpack.unpackb(serialized_payload)
- # returned_payload contains six.binary_type.
- self.assertEqual(first_value, returned_payload[0].decode('utf-8'))
- self.assertEqual(second_value, returned_payload[1].decode('utf-8'))
-
-
-class TestPayloads(unit.TestCase):
- def assertTimestampsEqual(self, expected, actual):
- # The timestamp that we get back when parsing the payload may not
- # exactly match the timestamp that was put in the payload due to
- # conversion to and from a float.
-
- exp_time = timeutils.parse_isotime(expected)
- actual_time = timeutils.parse_isotime(actual)
-
- # the granularity of timestamp string is microseconds and it's only the
- # last digit in the representation that's different, so use a delta
- # just above nanoseconds.
- return self.assertCloseEnoughForGovernmentWork(exp_time, actual_time,
- delta=1e-05)
-
- def test_uuid_hex_to_byte_conversions(self):
- payload_cls = token_formatters.BasePayload
-
- expected_hex_uuid = uuid.uuid4().hex
- uuid_obj = uuid.UUID(expected_hex_uuid)
- expected_uuid_in_bytes = uuid_obj.bytes
- actual_uuid_in_bytes = payload_cls.convert_uuid_hex_to_bytes(
- expected_hex_uuid)
- self.assertEqual(expected_uuid_in_bytes, actual_uuid_in_bytes)
- actual_hex_uuid = payload_cls.convert_uuid_bytes_to_hex(
- expected_uuid_in_bytes)
- self.assertEqual(expected_hex_uuid, actual_hex_uuid)
-
- def test_time_string_to_float_conversions(self):
- payload_cls = token_formatters.BasePayload
-
- original_time_str = utils.isotime(subsecond=True)
- time_obj = timeutils.parse_isotime(original_time_str)
- expected_time_float = (
- (timeutils.normalize_time(time_obj) -
- datetime.datetime.utcfromtimestamp(0)).total_seconds())
-
- # NOTE(lbragstad): The token expiration time for Fernet tokens is
- # passed in the payload of the token. This is different from the token
- # creation time, which is handled by Fernet and doesn't support
- # subsecond precision because it is a timestamp integer.
- self.assertIsInstance(expected_time_float, float)
-
- actual_time_float = payload_cls._convert_time_string_to_float(
- original_time_str)
- self.assertIsInstance(actual_time_float, float)
- self.assertEqual(expected_time_float, actual_time_float)
-
- # Generate expected_time_str using the same time float. Using
- # original_time_str from utils.isotime will occasionally fail due to
- # floating point rounding differences.
- time_object = datetime.datetime.utcfromtimestamp(actual_time_float)
- expected_time_str = utils.isotime(time_object, subsecond=True)
-
- actual_time_str = payload_cls._convert_float_to_time_string(
- actual_time_float)
- self.assertEqual(expected_time_str, actual_time_str)
-
- def _test_payload(self, payload_class, exp_user_id=None, exp_methods=None,
- exp_project_id=None, exp_domain_id=None,
- exp_trust_id=None, exp_federated_info=None,
- exp_access_token_id=None):
- exp_user_id = exp_user_id or uuid.uuid4().hex
- exp_methods = exp_methods or ['password']
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
-
- payload = payload_class.assemble(
- exp_user_id, exp_methods, exp_project_id, exp_domain_id,
- exp_expires_at, exp_audit_ids, exp_trust_id, exp_federated_info,
- exp_access_token_id)
-
- (user_id, methods, project_id,
- domain_id, expires_at, audit_ids,
- trust_id, federated_info,
- access_token_id) = payload_class.disassemble(payload)
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertTimestampsEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
- self.assertEqual(exp_project_id, project_id)
- self.assertEqual(exp_domain_id, domain_id)
- self.assertEqual(exp_trust_id, trust_id)
- self.assertEqual(exp_access_token_id, access_token_id)
-
- if exp_federated_info:
- self.assertDictEqual(exp_federated_info, federated_info)
- else:
- self.assertIsNone(federated_info)
-
- def test_unscoped_payload(self):
- self._test_payload(token_formatters.UnscopedPayload)
-
- def test_project_scoped_payload(self):
- self._test_payload(token_formatters.ProjectScopedPayload,
- exp_project_id=uuid.uuid4().hex)
-
- def test_domain_scoped_payload(self):
- self._test_payload(token_formatters.DomainScopedPayload,
- exp_domain_id=uuid.uuid4().hex)
-
- def test_domain_scoped_payload_with_default_domain(self):
- self._test_payload(token_formatters.DomainScopedPayload,
- exp_domain_id=CONF.identity.default_domain_id)
-
- def test_trust_scoped_payload(self):
- self._test_payload(token_formatters.TrustScopedPayload,
- exp_project_id=uuid.uuid4().hex,
- exp_trust_id=uuid.uuid4().hex)
-
- def test_unscoped_payload_with_non_uuid_user_id(self):
- self._test_payload(token_formatters.UnscopedPayload,
- exp_user_id='someNonUuidUserId')
-
- def test_unscoped_payload_with_16_char_non_uuid_user_id(self):
- self._test_payload(token_formatters.UnscopedPayload,
- exp_user_id='0123456789abcdef')
-
- def test_project_scoped_payload_with_non_uuid_ids(self):
- self._test_payload(token_formatters.ProjectScopedPayload,
- exp_user_id='someNonUuidUserId',
- exp_project_id='someNonUuidProjectId')
-
- def test_project_scoped_payload_with_16_char_non_uuid_ids(self):
- self._test_payload(token_formatters.ProjectScopedPayload,
- exp_user_id='0123456789abcdef',
- exp_project_id='0123456789abcdef')
-
- def test_domain_scoped_payload_with_non_uuid_user_id(self):
- self._test_payload(token_formatters.DomainScopedPayload,
- exp_user_id='nonUuidUserId',
- exp_domain_id=uuid.uuid4().hex)
-
- def test_domain_scoped_payload_with_16_char_non_uuid_user_id(self):
- self._test_payload(token_formatters.DomainScopedPayload,
- exp_user_id='0123456789abcdef',
- exp_domain_id=uuid.uuid4().hex)
-
- def test_trust_scoped_payload_with_non_uuid_ids(self):
- self._test_payload(token_formatters.TrustScopedPayload,
- exp_user_id='someNonUuidUserId',
- exp_project_id='someNonUuidProjectId',
- exp_trust_id=uuid.uuid4().hex)
-
- def test_trust_scoped_payload_with_16_char_non_uuid_ids(self):
- self._test_payload(token_formatters.TrustScopedPayload,
- exp_user_id='0123456789abcdef',
- exp_project_id='0123456789abcdef',
- exp_trust_id=uuid.uuid4().hex)
-
- def _test_federated_payload_with_ids(self, exp_user_id, exp_group_id):
- exp_federated_info = {'group_ids': [{'id': exp_group_id}],
- 'idp_id': uuid.uuid4().hex,
- 'protocol_id': uuid.uuid4().hex}
-
- self._test_payload(token_formatters.FederatedUnscopedPayload,
- exp_user_id=exp_user_id,
- exp_federated_info=exp_federated_info)
-
- def test_federated_payload_with_non_uuid_ids(self):
- self._test_federated_payload_with_ids('someNonUuidUserId',
- 'someNonUuidGroupId')
-
- def test_federated_payload_with_16_char_non_uuid_ids(self):
- self._test_federated_payload_with_ids('0123456789abcdef',
- '0123456789abcdef')
-
- def test_federated_project_scoped_payload(self):
- exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}],
- 'idp_id': uuid.uuid4().hex,
- 'protocol_id': uuid.uuid4().hex}
-
- self._test_payload(token_formatters.FederatedProjectScopedPayload,
- exp_user_id='someNonUuidUserId',
- exp_methods=['token'],
- exp_project_id=uuid.uuid4().hex,
- exp_federated_info=exp_federated_info)
-
- def test_federated_domain_scoped_payload(self):
- exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}],
- 'idp_id': uuid.uuid4().hex,
- 'protocol_id': uuid.uuid4().hex}
-
- self._test_payload(token_formatters.FederatedDomainScopedPayload,
- exp_user_id='someNonUuidUserId',
- exp_methods=['token'],
- exp_domain_id=uuid.uuid4().hex,
- exp_federated_info=exp_federated_info)
-
- def test_oauth_scoped_payload(self):
- self._test_payload(token_formatters.OauthScopedPayload,
- exp_project_id=uuid.uuid4().hex,
- exp_access_token_id=uuid.uuid4().hex)
-
-
-class TestFernetKeyRotation(unit.TestCase):
- def setUp(self):
- super(TestFernetKeyRotation, self).setUp()
-
- # A collection of all previously-seen signatures of the key
- # repository's contents.
- self.key_repo_signatures = set()
-
- @property
- def keys(self):
- """Key files converted to numbers."""
- return sorted(
- int(x) for x in os.listdir(CONF.fernet_tokens.key_repository))
-
- @property
- def key_repository_size(self):
- """The number of keys in the key repository."""
- return len(self.keys)
-
- @property
- def key_repository_signature(self):
- """Create a "thumbprint" of the current key repository.
-
- Because key files are renamed, this produces a hash of the contents of
- the key files, ignoring their filenames.
-
- The resulting signature can be used, for example, to ensure that you
- have a unique set of keys after you perform a key rotation (taking a
- static set of keys, and simply shuffling them, would fail such a test).
-
- """
- # Load the keys into a list, keys is list of six.text_type.
- keys = fernet_utils.load_keys()
-
- # Sort the list of keys by the keys themselves (they were previously
- # sorted by filename).
- keys.sort()
-
- # Create the thumbprint using all keys in the repository.
- signature = hashlib.sha1()
- for key in keys:
- # Need to convert key to six.binary_type for update.
- signature.update(key.encode('utf-8'))
- return signature.hexdigest()
-
- def assertRepositoryState(self, expected_size):
- """Validate the state of the key repository."""
- self.assertEqual(expected_size, self.key_repository_size)
- self.assertUniqueRepositoryState()
-
- def assertUniqueRepositoryState(self):
- """Ensures that the current key repo state has not been seen before."""
- # This is assigned to a variable because it takes some work to
- # calculate.
- signature = self.key_repository_signature
-
- # Ensure the signature is not in the set of previously seen signatures.
- self.assertNotIn(signature, self.key_repo_signatures)
-
- # Add the signature to the set of repository signatures to validate
- # that we don't see it again later.
- self.key_repo_signatures.add(signature)
-
- def test_rotation(self):
- # Initializing a key repository results in this many keys. We don't
- # support max_active_keys being set any lower.
- min_active_keys = 2
-
- # Simulate every rotation strategy up to "rotating once a week while
- # maintaining a year's worth of keys."
- for max_active_keys in range(min_active_keys, 52 + 1):
- self.config_fixture.config(group='fernet_tokens',
- max_active_keys=max_active_keys)
-
- # Ensure that resetting the key repository always results in 2
- # active keys.
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
- # Validate the initial repository state.
- self.assertRepositoryState(expected_size=min_active_keys)
-
- # The repository should be initialized with a staged key (0) and a
- # primary key (1). The next key is just auto-incremented.
- exp_keys = [0, 1]
- next_key_number = exp_keys[-1] + 1 # keep track of next key
- self.assertEqual(exp_keys, self.keys)
-
- # Rotate the keys just enough times to fully populate the key
- # repository.
- for rotation in range(max_active_keys - min_active_keys):
- fernet_utils.rotate_keys()
- self.assertRepositoryState(expected_size=rotation + 3)
-
- exp_keys.append(next_key_number)
- next_key_number += 1
- self.assertEqual(exp_keys, self.keys)
-
- # We should have a fully populated key repository now.
- self.assertEqual(max_active_keys, self.key_repository_size)
-
- # Rotate an additional number of times to ensure that we maintain
- # the desired number of active keys.
- for rotation in range(10):
- fernet_utils.rotate_keys()
- self.assertRepositoryState(expected_size=max_active_keys)
-
- exp_keys.pop(1)
- exp_keys.append(next_key_number)
- next_key_number += 1
- self.assertEqual(exp_keys, self.keys)
-
- def test_non_numeric_files(self):
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
- evil_file = os.path.join(CONF.fernet_tokens.key_repository, '99.bak')
- with open(evil_file, 'w'):
- pass
- fernet_utils.rotate_keys()
- self.assertTrue(os.path.isfile(evil_file))
- keys = 0
- for x in os.listdir(CONF.fernet_tokens.key_repository):
- if x == '99.bak':
- continue
- keys += 1
- self.assertEqual(3, keys)
-
-
-class TestLoadKeys(unit.TestCase):
- def test_non_numeric_files(self):
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
- evil_file = os.path.join(CONF.fernet_tokens.key_repository, '~1')
- with open(evil_file, 'w'):
- pass
- keys = fernet_utils.load_keys()
- self.assertEqual(2, len(keys))
- self.assertTrue(len(keys[0]))
diff --git a/keystone-moon/keystone/tests/unit/token/test_pki_provider.py b/keystone-moon/keystone/tests/unit/token/test_pki_provider.py
deleted file mode 100644
index b3ad4c2b..00000000
--- a/keystone-moon/keystone/tests/unit/token/test_pki_provider.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.tests import unit
-from keystone.token.providers import pki
-
-
-class TestPkiTokenProvider(unit.TestCase):
- def setUp(self):
- super(TestPkiTokenProvider, self).setUp()
- self.provider = pki.Provider()
-
- def test_supports_bind_authentication_returns_true(self):
- self.assertTrue(self.provider._supports_bind_authentication)
-
- def test_need_persistence_return_true(self):
- self.assertIs(True, self.provider.needs_persistence())
diff --git a/keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py b/keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py
deleted file mode 100644
index 1ffe7cfc..00000000
--- a/keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.tests import unit
-from keystone.token.providers import pkiz
-
-
-class TestPkizTokenProvider(unit.TestCase):
- def setUp(self):
- super(TestPkizTokenProvider, self).setUp()
- self.provider = pkiz.Provider()
-
- def test_supports_bind_authentication_returns_true(self):
- self.assertTrue(self.provider._supports_bind_authentication)
-
- def test_need_persistence_return_true(self):
- self.assertIs(True, self.provider.needs_persistence())
diff --git a/keystone-moon/keystone/tests/unit/token/test_provider.py b/keystone-moon/keystone/tests/unit/token/test_provider.py
deleted file mode 100644
index 7093f3ba..00000000
--- a/keystone-moon/keystone/tests/unit/token/test_provider.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import six
-from six.moves import urllib
-
-from keystone.tests import unit
-from keystone.token import provider
-
-
-class TestRandomStrings(unit.BaseTestCase):
- def test_strings_are_url_safe(self):
- s = provider.random_urlsafe_str()
- self.assertEqual(s, urllib.parse.quote_plus(s))
-
- def test_strings_can_be_converted_to_bytes(self):
- s = provider.random_urlsafe_str()
- self.assertIsInstance(s, six.text_type)
-
- b = provider.random_urlsafe_str_to_bytes(s)
- self.assertIsInstance(b, six.binary_type)
diff --git a/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py b/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py
deleted file mode 100644
index 9e8c3889..00000000
--- a/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import base64
-import uuid
-
-from testtools import matchers
-
-from keystone import exception
-from keystone.tests import unit
-from keystone.token.providers import common
-
-
-class TestTokenDataHelper(unit.TestCase):
- def setUp(self):
- super(TestTokenDataHelper, self).setUp()
- self.load_backends()
- self.v3_data_helper = common.V3TokenDataHelper()
-
- def test_v3_token_data_helper_populate_audit_info_string(self):
- token_data = {}
- audit_info_bytes = base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]
- audit_info = audit_info_bytes.decode('utf-8')
- self.v3_data_helper._populate_audit_info(token_data, audit_info)
- self.assertIn(audit_info, token_data['audit_ids'])
- self.assertThat(token_data['audit_ids'], matchers.HasLength(2))
-
- def test_v3_token_data_helper_populate_audit_info_none(self):
- token_data = {}
- self.v3_data_helper._populate_audit_info(token_data, audit_info=None)
- self.assertThat(token_data['audit_ids'], matchers.HasLength(1))
- self.assertNotIn(None, token_data['audit_ids'])
-
- def test_v3_token_data_helper_populate_audit_info_list(self):
- token_data = {}
- audit_info = [base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2],
- base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]]
- self.v3_data_helper._populate_audit_info(token_data, audit_info)
- self.assertEqual(audit_info, token_data['audit_ids'])
-
- def test_v3_token_data_helper_populate_audit_info_invalid(self):
- token_data = {}
- audit_info = dict()
- self.assertRaises(exception.UnexpectedError,
- self.v3_data_helper._populate_audit_info,
- token_data=token_data,
- audit_info=audit_info)
diff --git a/keystone-moon/keystone/tests/unit/token/test_token_model.py b/keystone-moon/keystone/tests/unit/token/test_token_model.py
deleted file mode 100644
index 1cb0ef55..00000000
--- a/keystone-moon/keystone/tests/unit/token/test_token_model.py
+++ /dev/null
@@ -1,263 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import copy
-import uuid
-
-from oslo_config import cfg
-from oslo_utils import timeutils
-from six.moves import range
-
-from keystone import exception
-from keystone.federation import constants as federation_constants
-from keystone.models import token_model
-from keystone.tests.unit import core
-from keystone.tests.unit import test_token_provider
-
-
-CONF = cfg.CONF
-
-
-class TestKeystoneTokenModel(core.TestCase):
- def setUp(self):
- super(TestKeystoneTokenModel, self).setUp()
- self.v2_sample_token = copy.deepcopy(
- test_token_provider.SAMPLE_V2_TOKEN)
- self.v3_sample_token = copy.deepcopy(
- test_token_provider.SAMPLE_V3_TOKEN)
-
- def test_token_model_v3(self):
- token_data = token_model.KeystoneToken(uuid.uuid4().hex,
- self.v3_sample_token)
- self.assertIs(token_model.V3, token_data.version)
- expires = timeutils.normalize_time(timeutils.parse_isotime(
- self.v3_sample_token['token']['expires_at']))
- issued = timeutils.normalize_time(timeutils.parse_isotime(
- self.v3_sample_token['token']['issued_at']))
- self.assertEqual(expires, token_data.expires)
- self.assertEqual(issued, token_data.issued)
- self.assertEqual(self.v3_sample_token['token']['user']['id'],
- token_data.user_id)
- self.assertEqual(self.v3_sample_token['token']['user']['name'],
- token_data.user_name)
- self.assertEqual(self.v3_sample_token['token']['user']['domain']['id'],
- token_data.user_domain_id)
- self.assertEqual(
- self.v3_sample_token['token']['user']['domain']['name'],
- token_data.user_domain_name)
- self.assertEqual(
- self.v3_sample_token['token']['project']['domain']['id'],
- token_data.project_domain_id)
- self.assertEqual(
- self.v3_sample_token['token']['project']['domain']['name'],
- token_data.project_domain_name)
- self.assertEqual(self.v3_sample_token['token']['OS-TRUST:trust']['id'],
- token_data.trust_id)
- self.assertEqual(
- self.v3_sample_token['token']['OS-TRUST:trust']['trustor_user_id'],
- token_data.trustor_user_id)
- self.assertEqual(
- self.v3_sample_token['token']['OS-TRUST:trust']['trustee_user_id'],
- token_data.trustee_user_id)
- # Project Scoped Token
- self.assertRaises(exception.UnexpectedError, getattr, token_data,
- 'domain_id')
- self.assertRaises(exception.UnexpectedError, getattr, token_data,
- 'domain_name')
- self.assertFalse(token_data.domain_scoped)
- self.assertEqual(self.v3_sample_token['token']['project']['id'],
- token_data.project_id)
- self.assertEqual(self.v3_sample_token['token']['project']['name'],
- token_data.project_name)
- self.assertTrue(token_data.project_scoped)
- self.assertTrue(token_data.scoped)
- self.assertTrue(token_data.trust_scoped)
- self.assertEqual(
- [r['id'] for r in self.v3_sample_token['token']['roles']],
- token_data.role_ids)
- self.assertEqual(
- [r['name'] for r in self.v3_sample_token['token']['roles']],
- token_data.role_names)
- token_data.pop('project')
- self.assertFalse(token_data.project_scoped)
- self.assertFalse(token_data.scoped)
- self.assertRaises(exception.UnexpectedError, getattr, token_data,
- 'project_id')
- self.assertRaises(exception.UnexpectedError, getattr, token_data,
- 'project_name')
- self.assertFalse(token_data.project_scoped)
- domain_id = uuid.uuid4().hex
- domain_name = uuid.uuid4().hex
- token_data['domain'] = {'id': domain_id,
- 'name': domain_name}
- self.assertEqual(domain_id, token_data.domain_id)
- self.assertEqual(domain_name, token_data.domain_name)
- self.assertTrue(token_data.domain_scoped)
-
- token_data['audit_ids'] = [uuid.uuid4().hex]
- self.assertEqual(token_data.audit_id,
- token_data['audit_ids'][0])
- self.assertEqual(token_data.audit_chain_id,
- token_data['audit_ids'][0])
- token_data['audit_ids'].append(uuid.uuid4().hex)
- self.assertEqual(token_data.audit_chain_id,
- token_data['audit_ids'][1])
- del token_data['audit_ids']
- self.assertIsNone(token_data.audit_id)
- self.assertIsNone(token_data.audit_chain_id)
-
- def test_token_model_v3_federated_user(self):
- token_data = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=self.v3_sample_token)
- federation_data = {'identity_provider': {'id': uuid.uuid4().hex},
- 'protocol': {'id': 'saml2'},
- 'groups': [{'id': uuid.uuid4().hex}
- for x in range(1, 5)]}
-
- self.assertFalse(token_data.is_federated_user)
- self.assertEqual([], token_data.federation_group_ids)
- self.assertIsNone(token_data.federation_protocol_id)
- self.assertIsNone(token_data.federation_idp_id)
-
- token_data['user'][federation_constants.FEDERATION] = federation_data
-
- self.assertTrue(token_data.is_federated_user)
- self.assertEqual([x['id'] for x in federation_data['groups']],
- token_data.federation_group_ids)
- self.assertEqual(federation_data['protocol']['id'],
- token_data.federation_protocol_id)
- self.assertEqual(federation_data['identity_provider']['id'],
- token_data.federation_idp_id)
-
- def test_token_model_v2_federated_user(self):
- token_data = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=self.v2_sample_token)
- federation_data = {'identity_provider': {'id': uuid.uuid4().hex},
- 'protocol': {'id': 'saml2'},
- 'groups': [{'id': uuid.uuid4().hex}
- for x in range(1, 5)]}
- self.assertFalse(token_data.is_federated_user)
- self.assertEqual([], token_data.federation_group_ids)
- self.assertIsNone(token_data.federation_protocol_id)
- self.assertIsNone(token_data.federation_idp_id)
-
- token_data['user'][federation_constants.FEDERATION] = federation_data
-
- # Federated users should not exist in V2, the data should remain empty
- self.assertFalse(token_data.is_federated_user)
- self.assertEqual([], token_data.federation_group_ids)
- self.assertIsNone(token_data.federation_protocol_id)
- self.assertIsNone(token_data.federation_idp_id)
-
- def test_token_model_v2(self):
- token_data = token_model.KeystoneToken(uuid.uuid4().hex,
- self.v2_sample_token)
- self.assertIs(token_model.V2, token_data.version)
- expires = timeutils.normalize_time(timeutils.parse_isotime(
- self.v2_sample_token['access']['token']['expires']))
- issued = timeutils.normalize_time(timeutils.parse_isotime(
- self.v2_sample_token['access']['token']['issued_at']))
- self.assertEqual(expires, token_data.expires)
- self.assertEqual(issued, token_data.issued)
- self.assertEqual(self.v2_sample_token['access']['user']['id'],
- token_data.user_id)
- self.assertEqual(self.v2_sample_token['access']['user']['name'],
- token_data.user_name)
- self.assertEqual(CONF.identity.default_domain_id,
- token_data.user_domain_id)
- self.assertEqual('Default', token_data.user_domain_name)
- self.assertEqual(CONF.identity.default_domain_id,
- token_data.project_domain_id)
- self.assertEqual('Default',
- token_data.project_domain_name)
- self.assertEqual(self.v2_sample_token['access']['trust']['id'],
- token_data.trust_id)
- self.assertEqual(
- self.v2_sample_token['access']['trust']['trustor_user_id'],
- token_data.trustor_user_id)
- self.assertEqual(
- self.v2_sample_token['access']['trust']['impersonation'],
- token_data.trust_impersonation)
- self.assertEqual(
- self.v2_sample_token['access']['trust']['trustee_user_id'],
- token_data.trustee_user_id)
- # Project Scoped Token
- self.assertEqual(
- self.v2_sample_token['access']['token']['tenant']['id'],
- token_data.project_id)
- self.assertEqual(
- self.v2_sample_token['access']['token']['tenant']['name'],
- token_data.project_name)
- self.assertTrue(token_data.project_scoped)
- self.assertTrue(token_data.scoped)
- self.assertTrue(token_data.trust_scoped)
- self.assertEqual(
- [r['name']
- for r in self.v2_sample_token['access']['user']['roles']],
- token_data.role_names)
- token_data['token'].pop('tenant')
- self.assertFalse(token_data.scoped)
- self.assertFalse(token_data.project_scoped)
- self.assertFalse(token_data.domain_scoped)
- self.assertRaises(exception.UnexpectedError, getattr, token_data,
- 'project_id')
- self.assertRaises(exception.UnexpectedError, getattr, token_data,
- 'project_name')
- self.assertRaises(exception.UnexpectedError, getattr, token_data,
- 'project_domain_id')
- self.assertRaises(exception.UnexpectedError, getattr, token_data,
- 'project_domain_id')
- # No Domain Scoped tokens in V2
- self.assertRaises(NotImplementedError, getattr, token_data,
- 'domain_id')
- self.assertRaises(NotImplementedError, getattr, token_data,
- 'domain_name')
- token_data['domain'] = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.assertRaises(NotImplementedError, getattr, token_data,
- 'domain_id')
- self.assertRaises(NotImplementedError, getattr, token_data,
- 'domain_name')
- self.assertFalse(token_data.domain_scoped)
-
- token_data['token']['audit_ids'] = [uuid.uuid4().hex]
- self.assertEqual(token_data.audit_chain_id,
- token_data['token']['audit_ids'][0])
- token_data['token']['audit_ids'].append(uuid.uuid4().hex)
- self.assertEqual(token_data.audit_chain_id,
- token_data['token']['audit_ids'][1])
- self.assertEqual(token_data.audit_id,
- token_data['token']['audit_ids'][0])
- del token_data['token']['audit_ids']
- self.assertIsNone(token_data.audit_id)
- self.assertIsNone(token_data.audit_chain_id)
-
- def test_token_model_unknown(self):
- self.assertRaises(exception.UnsupportedTokenVersionException,
- token_model.KeystoneToken,
- token_id=uuid.uuid4().hex,
- token_data={'bogus_data': uuid.uuid4().hex})
-
- def test_token_model_dual_scoped_token(self):
- domain = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.v2_sample_token['access']['domain'] = domain
- self.v3_sample_token['token']['domain'] = domain
-
- # V2 Tokens Cannot be domain scoped, this should work
- token_model.KeystoneToken(token_id=uuid.uuid4().hex,
- token_data=self.v2_sample_token)
-
- self.assertRaises(exception.UnexpectedError,
- token_model.KeystoneToken,
- token_id=uuid.uuid4().hex,
- token_data=self.v3_sample_token)
diff --git a/keystone-moon/keystone/tests/unit/token/test_uuid_provider.py b/keystone-moon/keystone/tests/unit/token/test_uuid_provider.py
deleted file mode 100644
index 5c364490..00000000
--- a/keystone-moon/keystone/tests/unit/token/test_uuid_provider.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from keystone.tests import unit
-from keystone.token.providers import uuid
-
-
-class TestUuidTokenProvider(unit.TestCase):
- def setUp(self):
- super(TestUuidTokenProvider, self).setUp()
- self.provider = uuid.Provider()
-
- def test_supports_bind_authentication_returns_true(self):
- self.assertTrue(self.provider._supports_bind_authentication)
-
- def test_need_persistence_return_true(self):
- self.assertIs(True, self.provider.needs_persistence())
diff --git a/keystone-moon/keystone/tests/unit/trust/__init__.py b/keystone-moon/keystone/tests/unit/trust/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/keystone-moon/keystone/tests/unit/trust/__init__.py
+++ /dev/null
diff --git a/keystone-moon/keystone/tests/unit/trust/test_backends.py b/keystone-moon/keystone/tests/unit/trust/test_backends.py
deleted file mode 100644
index 05df866f..00000000
--- a/keystone-moon/keystone/tests/unit/trust/test_backends.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import uuid
-
-from oslo_utils import timeutils
-from six.moves import range
-
-from keystone import exception
-
-
-class TrustTests(object):
- def create_sample_trust(self, new_id, remaining_uses=None):
- self.trustor = self.user_foo
- self.trustee = self.user_two
- expires_at = datetime.datetime.utcnow().replace(year=2032)
- trust_data = (self.trust_api.create_trust
- (new_id,
- {'trustor_user_id': self.trustor['id'],
- 'trustee_user_id': self.user_two['id'],
- 'project_id': self.tenant_bar['id'],
- 'expires_at': expires_at,
- 'impersonation': True,
- 'remaining_uses': remaining_uses},
- roles=[{"id": "member"},
- {"id": "other"},
- {"id": "browser"}]))
- return trust_data
-
- def test_delete_trust(self):
- new_id = uuid.uuid4().hex
- trust_data = self.create_sample_trust(new_id)
- trust_id = trust_data['id']
- self.assertIsNotNone(trust_data)
- trust_data = self.trust_api.get_trust(trust_id)
- self.assertEqual(new_id, trust_data['id'])
- self.trust_api.delete_trust(trust_id)
- self.assertRaises(exception.TrustNotFound,
- self.trust_api.get_trust,
- trust_id)
-
- def test_delete_trust_not_found(self):
- trust_id = uuid.uuid4().hex
- self.assertRaises(exception.TrustNotFound,
- self.trust_api.delete_trust,
- trust_id)
-
- def test_get_trust(self):
- new_id = uuid.uuid4().hex
- trust_data = self.create_sample_trust(new_id)
- trust_id = trust_data['id']
- self.assertIsNotNone(trust_data)
- trust_data = self.trust_api.get_trust(trust_id)
- self.assertEqual(new_id, trust_data['id'])
- self.trust_api.delete_trust(trust_data['id'])
-
- def test_get_deleted_trust(self):
- new_id = uuid.uuid4().hex
- trust_data = self.create_sample_trust(new_id)
- self.assertIsNotNone(trust_data)
- self.assertIsNone(trust_data['deleted_at'])
- self.trust_api.delete_trust(new_id)
- self.assertRaises(exception.TrustNotFound,
- self.trust_api.get_trust,
- new_id)
- deleted_trust = self.trust_api.get_trust(trust_data['id'],
- deleted=True)
- self.assertEqual(trust_data['id'], deleted_trust['id'])
- self.assertIsNotNone(deleted_trust.get('deleted_at'))
-
- def test_create_trust(self):
- new_id = uuid.uuid4().hex
- trust_data = self.create_sample_trust(new_id)
-
- self.assertEqual(new_id, trust_data['id'])
- self.assertEqual(self.trustee['id'], trust_data['trustee_user_id'])
- self.assertEqual(self.trustor['id'], trust_data['trustor_user_id'])
- self.assertTrue(timeutils.normalize_time(trust_data['expires_at']) >
- timeutils.utcnow())
-
- self.assertEqual([{'id': 'member'},
- {'id': 'other'},
- {'id': 'browser'}], trust_data['roles'])
-
- def test_list_trust_by_trustee(self):
- for i in range(3):
- self.create_sample_trust(uuid.uuid4().hex)
- trusts = self.trust_api.list_trusts_for_trustee(self.trustee['id'])
- self.assertEqual(3, len(trusts))
- self.assertEqual(trusts[0]["trustee_user_id"], self.trustee['id'])
- trusts = self.trust_api.list_trusts_for_trustee(self.trustor['id'])
- self.assertEqual(0, len(trusts))
-
- def test_list_trust_by_trustor(self):
- for i in range(3):
- self.create_sample_trust(uuid.uuid4().hex)
- trusts = self.trust_api.list_trusts_for_trustor(self.trustor['id'])
- self.assertEqual(3, len(trusts))
- self.assertEqual(trusts[0]["trustor_user_id"], self.trustor['id'])
- trusts = self.trust_api.list_trusts_for_trustor(self.trustee['id'])
- self.assertEqual(0, len(trusts))
-
- def test_list_trusts(self):
- for i in range(3):
- self.create_sample_trust(uuid.uuid4().hex)
- trusts = self.trust_api.list_trusts()
- self.assertEqual(3, len(trusts))
-
- def test_trust_has_remaining_uses_positive(self):
- # create a trust with limited uses, check that we have uses left
- trust_data = self.create_sample_trust(uuid.uuid4().hex,
- remaining_uses=5)
- self.assertEqual(5, trust_data['remaining_uses'])
- # create a trust with unlimited uses, check that we have uses left
- trust_data = self.create_sample_trust(uuid.uuid4().hex)
- self.assertIsNone(trust_data['remaining_uses'])
-
- def test_trust_has_remaining_uses_negative(self):
- # try to create a trust with no remaining uses, check that it fails
- self.assertRaises(exception.ValidationError,
- self.create_sample_trust,
- uuid.uuid4().hex,
- remaining_uses=0)
- # try to create a trust with negative remaining uses,
- # check that it fails
- self.assertRaises(exception.ValidationError,
- self.create_sample_trust,
- uuid.uuid4().hex,
- remaining_uses=-12)
-
- def test_consume_use(self):
- # consume a trust repeatedly until it has no uses anymore
- trust_data = self.create_sample_trust(uuid.uuid4().hex,
- remaining_uses=2)
- self.trust_api.consume_use(trust_data['id'])
- t = self.trust_api.get_trust(trust_data['id'])
- self.assertEqual(1, t['remaining_uses'])
- self.trust_api.consume_use(trust_data['id'])
- # This was the last use, the trust isn't available anymore
- self.assertRaises(exception.TrustNotFound,
- self.trust_api.get_trust,
- trust_data['id'])
-
- def test_duplicate_trusts_not_allowed(self):
- self.trustor = self.user_foo
- self.trustee = self.user_two
- trust_data = {'trustor_user_id': self.trustor['id'],
- 'trustee_user_id': self.user_two['id'],
- 'project_id': self.tenant_bar['id'],
- 'expires_at': timeutils.parse_isotime(
- '2032-02-18T18:10:00Z'),
- 'impersonation': True,
- 'remaining_uses': None}
- roles = [{"id": "member"},
- {"id": "other"},
- {"id": "browser"}]
- self.trust_api.create_trust(uuid.uuid4().hex, trust_data, roles)
- self.assertRaises(exception.Conflict,
- self.trust_api.create_trust,
- uuid.uuid4().hex,
- trust_data,
- roles)
diff --git a/keystone-moon/keystone/tests/unit/utils.py b/keystone-moon/keystone/tests/unit/utils.py
deleted file mode 100644
index e3e49e70..00000000
--- a/keystone-moon/keystone/tests/unit/utils.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Useful utilities for tests."""
-
-import functools
-import os
-import time
-import uuid
-
-import six
-from testtools import testcase
-
-
-TZ = None
-
-
-def timezone(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- tz_original = os.environ.get('TZ')
- try:
- if TZ:
- os.environ['TZ'] = TZ
- time.tzset()
- return func(*args, **kwargs)
- finally:
- if TZ:
- if tz_original:
- os.environ['TZ'] = tz_original
- else:
- if 'TZ' in os.environ:
- del os.environ['TZ']
- time.tzset()
- return wrapper
-
-
-def new_uuid():
- """Return a string UUID."""
- return uuid.uuid4().hex
-
-
-def wip(message):
- """Mark a test as work in progress.
-
- Based on code by Nat Pryce:
- https://gist.github.com/npryce/997195#file-wip-py
-
- The test will always be run. If the test fails then a TestSkipped
- exception is raised. If the test passes an AssertionError exception
- is raised so that the developer knows they made the test pass. This
- is a reminder to remove the decorator.
-
- :param message: a string message to help clarify why the test is
- marked as a work in progress
-
- usage:
- >>> @wip('waiting on bug #000000')
- >>> def test():
- >>> pass
-
- """
- def _wip(f):
- @six.wraps(f)
- def run_test(*args, **kwargs):
- try:
- f(*args, **kwargs)
- except Exception:
- raise testcase.TestSkipped('work in progress test failed: ' +
- message)
-
- raise AssertionError('work in progress test passed: ' + message)
-
- return run_test
-
- return _wip