aboutsummaryrefslogtreecommitdiffstats
path: root/keystone-moon/keystone/tests
diff options
context:
space:
mode:
authorDUVAL Thomas <thomas.duval@orange.com>2016-06-09 09:11:50 +0200
committerDUVAL Thomas <thomas.duval@orange.com>2016-06-09 09:11:50 +0200
commit2e7b4f2027a1147ca28301e4f88adf8274b39a1f (patch)
tree8b8d94001ebe6cc34106cf813b538911a8d66d9a /keystone-moon/keystone/tests
parenta33bdcb627102a01244630a54cb4b5066b385a6a (diff)
Update Keystone core to Mitaka.
Change-Id: Ia10d6add16f4a9d25d1f42d420661c46332e69db
Diffstat (limited to 'keystone-moon/keystone/tests')
-rw-r--r--keystone-moon/keystone/tests/common/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/common/auth.py109
-rw-r--r--keystone-moon/keystone/tests/functional/core.py85
-rw-r--r--keystone-moon/keystone/tests/functional/shared/test_running.py22
-rw-r--r--keystone-moon/keystone/tests/hacking/checks.py45
-rw-r--r--keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py2
-rw-r--r--keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py4
-rw-r--r--keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py4
-rw-r--r--keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py2
-rw-r--r--keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py2
-rw-r--r--keystone-moon/keystone/tests/unit/__init__.py19
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py112
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/test_backends.py3755
-rw-r--r--keystone-moon/keystone/tests/unit/assignment/test_core.py123
-rw-r--r--keystone-moon/keystone/tests/unit/backend/core_ldap.py4
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py39
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py108
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py71
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py30
-rw-r--r--keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/catalog/test_backends.py588
-rw-r--r--keystone-moon/keystone/tests/unit/catalog/test_core.py30
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_authorization.py161
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_ldap.py36
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_manager.py5
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_notifications.py329
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_sql_core.py10
-rw-r--r--keystone-moon/keystone/tests/unit/common/test_utils.py48
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf4
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf3
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_sql.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf3
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf2
-rw-r--r--keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf3
-rw-r--r--keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py299
-rw-r--r--keystone-moon/keystone/tests/unit/core.py388
-rw-r--r--keystone-moon/keystone/tests/unit/default_fixtures.py61
-rw-r--r--keystone-moon/keystone/tests/unit/external/README.rst9
-rw-r--r--keystone-moon/keystone/tests/unit/external/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/external/test_timeutils.py33
-rw-r--r--keystone-moon/keystone/tests/unit/fakeldap.py61
-rw-r--r--keystone-moon/keystone/tests/unit/filtering.py3
-rw-r--r--keystone-moon/keystone/tests/unit/identity/test_backends.py1297
-rw-r--r--keystone-moon/keystone/tests/unit/identity/test_controllers.py65
-rw-r--r--keystone-moon/keystone/tests/unit/identity/test_core.py4
-rw-r--r--keystone-moon/keystone/tests/unit/identity_mapping.py7
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/__init__.py2
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/appserver.py6
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py34
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/cache.py17
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/database.py75
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/hacking.py176
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py3
-rw-r--r--keystone-moon/keystone/tests/unit/ksfixtures/policy.py33
-rw-r--r--keystone-moon/keystone/tests/unit/mapping_fixtures.py176
-rw-r--r--keystone-moon/keystone/tests/unit/policy/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/policy/test_backends.py86
-rw-r--r--keystone-moon/keystone/tests/unit/resource/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/resource/backends/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/resource/backends/test_sql.py24
-rw-r--r--keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py53
-rw-r--r--keystone-moon/keystone/tests/unit/resource/test_backends.py1669
-rw-r--r--keystone-moon/keystone/tests/unit/resource/test_controllers.py57
-rw-r--r--keystone-moon/keystone/tests/unit/resource/test_core.py692
-rw-r--r--keystone-moon/keystone/tests/unit/rest.py28
-rw-r--r--keystone-moon/keystone/tests/unit/schema/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/schema/v2.py161
-rw-r--r--keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py453
-rw-r--r--keystone-moon/keystone/tests/unit/test_auth.py202
-rw-r--r--keystone-moon/keystone/tests/unit/test_auth_plugin.py2
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py23
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py5
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_kvs.py66
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_ldap.py1285
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py29
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_rules.py19
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_sql.py619
-rw-r--r--keystone-moon/keystone/tests/unit/test_backend_templated.py52
-rw-r--r--keystone-moon/keystone/tests/unit/test_catalog.py131
-rw-r--r--keystone-moon/keystone/tests/unit/test_cert_setup.py37
-rw-r--r--keystone-moon/keystone/tests/unit/test_cli.py242
-rw-r--r--keystone-moon/keystone/tests/unit/test_config.py2
-rw-r--r--keystone-moon/keystone/tests/unit/test_contrib_s3_core.py56
-rw-r--r--keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_credential.py265
-rw-r--r--keystone-moon/keystone/tests/unit/test_driver_hints.py2
-rw-r--r--keystone-moon/keystone/tests/unit/test_entry_points.py48
-rw-r--r--keystone-moon/keystone/tests/unit/test_exception.py74
-rw-r--r--keystone-moon/keystone/tests/unit/test_hacking_checks.py42
-rw-r--r--keystone-moon/keystone/tests/unit/test_kvs.py38
-rw-r--r--keystone-moon/keystone/tests/unit/test_ldap_livetest.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py3
-rw-r--r--keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py4
-rw-r--r--keystone-moon/keystone/tests/unit/test_middleware.py620
-rw-r--r--keystone-moon/keystone/tests/unit/test_policy.py41
-rw-r--r--keystone-moon/keystone/tests/unit/test_revoke.py76
-rw-r--r--keystone-moon/keystone/tests/unit/test_sql_livetest.py24
-rw-r--r--keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py353
-rw-r--r--keystone-moon/keystone/tests/unit/test_sql_upgrade.py1234
-rw-r--r--keystone-moon/keystone/tests/unit/test_token_provider.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_url_middleware.py1
-rw-r--r--keystone-moon/keystone/tests/unit/test_v2.py150
-rw-r--r--keystone-moon/keystone/tests/unit/test_v2_controller.py75
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3.py681
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_assignment.py2419
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_auth.py3237
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_catalog.py349
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_credential.py242
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_domain_config.py259
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py58
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_federation.py562
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_filters.py57
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_identity.py461
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_oauth1.py66
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_os_revoke.py10
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_policy.py29
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_protection.py739
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_resource.py1434
-rw-r--r--keystone-moon/keystone/tests/unit/test_v3_trust.py403
-rw-r--r--keystone-moon/keystone/tests/unit/test_validation.py352
-rw-r--r--keystone-moon/keystone/tests/unit/test_versions.py257
-rw-r--r--keystone-moon/keystone/tests/unit/test_wsgi.py141
-rw-r--r--keystone-moon/keystone/tests/unit/tests/test_core.py2
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_backends.py551
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_fernet_provider.py428
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_provider.py4
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_token_data_helper.py3
-rw-r--r--keystone-moon/keystone/tests/unit/token/test_token_model.py2
-rw-r--r--keystone-moon/keystone/tests/unit/trust/__init__.py0
-rw-r--r--keystone-moon/keystone/tests/unit/trust/test_backends.py172
-rw-r--r--keystone-moon/keystone/tests/unit/utils.py4
144 files changed, 22861 insertions, 7647 deletions
diff --git a/keystone-moon/keystone/tests/common/__init__.py b/keystone-moon/keystone/tests/common/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/common/__init__.py
diff --git a/keystone-moon/keystone/tests/common/auth.py b/keystone-moon/keystone/tests/common/auth.py
new file mode 100644
index 00000000..547418cf
--- /dev/null
+++ b/keystone-moon/keystone/tests/common/auth.py
@@ -0,0 +1,109 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
class AuthTestMixin(object):
    """To hold auth building helper functions."""

    def _build_auth_scope(self, project_id=None, project_name=None,
                          project_domain_id=None, project_domain_name=None,
                          domain_id=None, domain_name=None, trust_id=None,
                          unscoped=None):
        """Build the 'scope' portion of a v3 authentication request body.

        Only the pieces for which arguments were supplied appear in the
        result; an id always wins over the corresponding name.
        """
        scope = {}
        if unscoped:
            scope['unscoped'] = {}
        if project_id or project_name:
            project = ({'id': project_id} if project_id
                       else {'name': project_name})
            if project_domain_id or project_domain_name:
                project['domain'] = ({'id': project_domain_id}
                                     if project_domain_id
                                     else {'name': project_domain_name})
            scope['project'] = project
        if domain_id or domain_name:
            scope['domain'] = ({'id': domain_id} if domain_id
                               else {'name': domain_name})
        if trust_id:
            scope['OS-TRUST:trust'] = {'id': trust_id}
        return scope

    def _build_auth(self, user_id=None, username=None, user_domain_id=None,
                    user_domain_name=None, **kwargs):
        """Build the user-credential body for one secret-based auth method.

        Exactly one keyword argument naming the secret ('password' or
        'passcode') must be supplied; its value becomes the secret.
        """
        # NOTE(dstanek): just to ensure sanity in the tests
        self.assertEqual(1, len(kwargs),
                         message='_build_auth requires 1 (and only 1) '
                         'secret type and value')

        secret_type, secret_value = list(kwargs.items())[0]

        # NOTE(dstanek): just to ensure sanity in the tests
        self.assertIn(secret_type, ('passcode', 'password'),
                      message="_build_auth only supports 'passcode' "
                      "and 'password' secret types")

        user = {'id': user_id} if user_id else {'name': username}
        if user_domain_id or user_domain_name:
            user['domain'] = ({'id': user_domain_id} if user_domain_id
                              else {'name': user_domain_name})
        user[secret_type] = secret_value
        return {'user': user}

    def _build_token_auth(self, token):
        """Wrap a token id in the shape the 'token' auth method expects."""
        return {'id': token}

    def build_authentication_request(self, token=None, user_id=None,
                                     username=None, user_domain_id=None,
                                     user_domain_name=None, password=None,
                                     kerberos=False, passcode=None, **kwargs):
        """Build auth dictionary.

        It will create an auth dictionary based on all the arguments
        that it receives.  Any extra keyword arguments are treated as
        scope information and forwarded to _build_auth_scope.
        """
        identity = {'methods': []}
        if kerberos:
            identity['methods'].append('kerberos')
            identity['kerberos'] = {}
        if token:
            identity['methods'].append('token')
            identity['token'] = self._build_token_auth(token)
        # Secret-based methods require some way to name the user.
        if password and (user_id or username):
            identity['methods'].append('password')
            identity['password'] = self._build_auth(
                user_id, username, user_domain_id, user_domain_name,
                password=password)
        if passcode and (user_id or username):
            identity['methods'].append('totp')
            identity['totp'] = self._build_auth(
                user_id, username, user_domain_id, user_domain_name,
                passcode=passcode)
        auth_data = {'identity': identity}
        if kwargs:
            auth_data['scope'] = self._build_auth_scope(**kwargs)
        return {'auth': auth_data}
diff --git a/keystone-moon/keystone/tests/functional/core.py b/keystone-moon/keystone/tests/functional/core.py
new file mode 100644
index 00000000..2759412b
--- /dev/null
+++ b/keystone-moon/keystone/tests/functional/core.py
@@ -0,0 +1,85 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import requests
+import testtools
+
+from keystone.tests.common import auth as common_auth
+
+
class BaseTestCase(testtools.TestCase, common_auth.AuthTestMixin):
    """Base class for functional tests run against a live keystone.

    Endpoints and credentials are read from KSTEST_* environment
    variables so the same tests can target any deployment.
    """

    request_headers = {'content-type': 'application/json'}

    def setUp(self):
        env = os.environ.get
        self.ADMIN_URL = env('KSTEST_ADMIN_URL', 'http://localhost:35357')
        self.PUBLIC_URL = env('KSTEST_PUBLIC_URL', 'http://localhost:5000')
        self.admin = {
            'name': env('KSTEST_ADMIN_USERNAME', 'admin'),
            'password': env('KSTEST_ADMIN_PASSWORD', ''),
            'domain_id': env('KSTEST_ADMIN_DOMAIN_ID', 'default'),
        }
        self.user = {
            'name': env('KSTEST_USER_USERNAME', 'demo'),
            'password': env('KSTEST_USER_PASSWORD', ''),
            'domain_id': env('KSTEST_USER_DOMAIN_ID', 'default'),
        }
        # No default: scoping silently degrades if this is unset.
        self.project_id = env('KSTEST_PROJECT_ID')

        super(BaseTestCase, self).setUp()

    def _http_headers(self, token=None):
        """Return standard JSON headers, adding X-Auth-Token when given."""
        headers = {'content-type': 'application/json'}
        if token:
            headers['X-Auth-Token'] = token
        return headers

    def get_scoped_token_response(self, user):
        """Convenience method so that we can test authenticated requests

        :param user: A dictionary with user information like 'username',
                     'password', 'domain_id'
        :returns: urllib3.Response object

        """
        request_body = self.build_authentication_request(
            username=user['name'], user_domain_name=user['domain_id'],
            password=user['password'], project_id=self.project_id)
        return requests.post(self.PUBLIC_URL + '/v3/auth/tokens',
                             headers=self.request_headers,
                             json=request_body)

    def get_scoped_token(self, user):
        """Convenience method for getting scoped token

        This method doesn't do any token validation.

        :param user: A dictionary with user information like 'username',
                     'password', 'domain_id'
        :returns: An OpenStack token for further use
        :rtype: str

        """
        response = self.get_scoped_token_response(user)
        return response.headers.get('X-Subject-Token')

    def get_scoped_admin_token(self):
        """Shortcut for a token scoped to the admin account."""
        return self.get_scoped_token(self.admin)

    def get_scoped_user_token(self):
        """Shortcut for a token scoped to the regular user account."""
        return self.get_scoped_token(self.user)
diff --git a/keystone-moon/keystone/tests/functional/shared/test_running.py b/keystone-moon/keystone/tests/functional/shared/test_running.py
index aed48ac2..1b46b32d 100644
--- a/keystone-moon/keystone/tests/functional/shared/test_running.py
+++ b/keystone-moon/keystone/tests/functional/shared/test_running.py
@@ -13,38 +13,46 @@
import requests
import testtools.matchers
+from keystone.tests.functional import core as functests
+
is_multiple_choices = testtools.matchers.Equals(
requests.status_codes.codes.multiple_choices)
is_ok = testtools.matchers.Equals(requests.status_codes.codes.ok)
-admin_url = 'http://localhost:35357'
-public_url = 'http://localhost:5000'
versions = ('v2.0', 'v3')
-class TestServerRunning(testtools.TestCase):
+class TestServerRunning(functests.BaseTestCase):
def test_admin_responds_with_multiple_choices(self):
- resp = requests.get(admin_url)
+ resp = requests.get(self.ADMIN_URL)
self.assertThat(resp.status_code, is_multiple_choices)
def test_admin_versions(self):
for version in versions:
- resp = requests.get(admin_url + '/' + version)
+ resp = requests.get(self.ADMIN_URL + '/' + version)
self.assertThat(
resp.status_code,
testtools.matchers.Annotate(
'failed for version %s' % version, is_ok))
def test_public_responds_with_multiple_choices(self):
- resp = requests.get(public_url)
+ resp = requests.get(self.PUBLIC_URL)
self.assertThat(resp.status_code, is_multiple_choices)
def test_public_versions(self):
for version in versions:
- resp = requests.get(public_url + '/' + version)
+ resp = requests.get(self.PUBLIC_URL + '/' + version)
self.assertThat(
resp.status_code,
testtools.matchers.Annotate(
'failed for version %s' % version, is_ok))
+
+ def test_get_user_token(self):
+ token = self.get_scoped_user_token()
+ self.assertIsNotNone(token)
+
+ def test_get_admin_token(self):
+ token = self.get_scoped_admin_token()
+ self.assertIsNotNone(token)
diff --git a/keystone-moon/keystone/tests/hacking/checks.py b/keystone-moon/keystone/tests/hacking/checks.py
index 17bafff3..581dbcf9 100644
--- a/keystone-moon/keystone/tests/hacking/checks.py
+++ b/keystone-moon/keystone/tests/hacking/checks.py
@@ -126,14 +126,21 @@ class CheckForAssertingNoneEquality(BaseASTChecker):
# NOTE(dstanek): I wrote this in a verbose way to make it easier to
# read for those that have little experience with Python's AST.
+ def _is_None(node):
+ if six.PY3:
+ return (isinstance(node, ast.NameConstant)
+ and node.value is None)
+ else:
+ return isinstance(node, ast.Name) and node.id == 'None'
+
if isinstance(node.func, ast.Attribute):
if node.func.attr == 'assertEqual':
for arg in node.args:
- if isinstance(arg, ast.Name) and arg.id == 'None':
+ if _is_None(arg):
self.add_error(node, message=self.CHECK_DESC_IS)
elif node.func.attr == 'assertNotEqual':
for arg in node.args:
- if isinstance(arg, ast.Name) and arg.id == 'None':
+ if _is_None(arg):
self.add_error(node, message=self.CHECK_DESC_ISNOT)
super(CheckForAssertingNoneEquality, self).generic_visit(node)
@@ -144,6 +151,7 @@ class CheckForLoggingIssues(BaseASTChecker):
DEBUG_CHECK_DESC = 'K005 Using translated string in debug logging'
NONDEBUG_CHECK_DESC = 'K006 Not using translating helper for logging'
EXCESS_HELPER_CHECK_DESC = 'K007 Using hints when _ is necessary'
+ USING_DEPRECATED_WARN = 'K009 Using the deprecated Logger.warn'
LOG_MODULES = ('logging', 'oslo_log.log')
I18N_MODULES = (
'keystone.i18n._',
@@ -155,7 +163,6 @@ class CheckForLoggingIssues(BaseASTChecker):
TRANS_HELPER_MAP = {
'debug': None,
'info': '_LI',
- 'warn': '_LW',
'warning': '_LW',
'error': '_LE',
'exception': '_LE',
@@ -186,9 +193,7 @@ class CheckForLoggingIssues(BaseASTChecker):
self.visit(value)
def _filter_imports(self, module_name, alias):
- """Keeps lists of logging and i18n imports
-
- """
+ """Keeps lists of logging and i18n imports."""
if module_name in self.LOG_MODULES:
self.logger_module_names.append(alias.asname or alias.name)
elif module_name in self.I18N_MODULES:
@@ -284,10 +289,7 @@ class CheckForLoggingIssues(BaseASTChecker):
return super(CheckForLoggingIssues, self).generic_visit(node)
def visit_Call(self, node):
- """Look for the 'LOG.*' calls.
-
- """
-
+ """Look for the 'LOG.*' calls."""
# obj.method
if isinstance(node.func, ast.Attribute):
obj_name = self._find_name(node.func.value)
@@ -299,13 +301,18 @@ class CheckForLoggingIssues(BaseASTChecker):
else: # could be Subscript, Call or many more
return super(CheckForLoggingIssues, self).generic_visit(node)
+ # if dealing with a logger the method can't be "warn"
+ if obj_name in self.logger_names and method_name == 'warn':
+ msg = node.args[0] # first arg to a logging method is the msg
+ self.add_error(msg, message=self.USING_DEPRECATED_WARN)
+
# must be a logger instance and one of the support logging methods
if (obj_name not in self.logger_names
or method_name not in self.TRANS_HELPER_MAP):
return super(CheckForLoggingIssues, self).generic_visit(node)
# the call must have arguments
- if not len(node.args):
+ if not node.args:
return super(CheckForLoggingIssues, self).generic_visit(node)
if method_name == 'debug':
@@ -364,7 +371,7 @@ class CheckForLoggingIssues(BaseASTChecker):
# because:
# 1. We have code like this that we'll fix when dealing with the %:
# msg = _('....') % {}
- # LOG.warn(msg)
+ # LOG.warning(msg)
# 2. We also do LOG.exception(e) in several places. I'm not sure
# exactly what we should be doing about that.
if msg.id not in self.assignments:
@@ -391,15 +398,19 @@ class CheckForLoggingIssues(BaseASTChecker):
peers = find_peers(node)
for peer in peers:
if isinstance(peer, ast.Raise):
- if (isinstance(peer.type, ast.Call) and
- len(peer.type.args) > 0 and
- isinstance(peer.type.args[0], ast.Name) and
- name in (a.id for a in peer.type.args)):
+ if six.PY3:
+ exc = peer.exc
+ else:
+ exc = peer.type
+ if (isinstance(exc, ast.Call) and
+ len(exc.args) > 0 and
+ isinstance(exc.args[0], ast.Name) and
+ name in (a.id for a in exc.args)):
return True
else:
return False
elif isinstance(peer, ast.Assign):
- if name in (t.id for t in peer.targets):
+ if name in (t.id for t in peer.targets if hasattr(t, 'id')):
return False
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py
index 9775047d..59eb3d25 100644
--- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py
@@ -57,7 +57,7 @@ class TestConfigurationManager(tests.TestCase):
self.config_fixture.config(
group='moon',
tenant_driver='keystone.contrib.moon.backends.sql.TenantConnector')
- self.policy_directory = 'examples/moon/policies'
+ self.policy_directory = '/etc/keystone/policies'
self.config_fixture.config(
group='moon',
intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector')
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py
index ca6ef93e..f32df5dd 100644
--- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py
@@ -65,7 +65,7 @@ class TestIntraExtensionAdminManagerOK(tests.TestCase):
def config_overrides(self):
super(TestIntraExtensionAdminManagerOK, self).config_overrides()
- self.policy_directory = 'examples/moon/policies'
+ self.policy_directory = '/etc/keystone/policies'
self.config_fixture.config(
group='moon',
intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector')
@@ -989,7 +989,7 @@ class TestIntraExtensionAdminManagerKO(tests.TestCase):
def config_overrides(self):
super(TestIntraExtensionAdminManagerKO, self).config_overrides()
- self.policy_directory = 'examples/moon/policies'
+ self.policy_directory = '/etc/keystone/policies'
self.config_fixture.config(
group='moon',
intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector')
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py
index 8fa46268..13d9dcd1 100644
--- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py
@@ -64,7 +64,7 @@ class TestIntraExtensionAuthzManagerAuthzOK(tests.TestCase):
def config_overrides(self):
super(TestIntraExtensionAuthzManagerAuthzOK, self).config_overrides()
- self.policy_directory = 'examples/moon/policies'
+ self.policy_directory = '/etc/keystone/policies'
self.config_fixture.config(
group='moon',
intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector')
@@ -975,7 +975,7 @@ class TestIntraExtensionAuthzManagerAuthzKO(tests.TestCase):
def config_overrides(self):
super(TestIntraExtensionAuthzManagerAuthzKO, self).config_overrides()
- self.policy_directory = 'examples/moon/policies'
+ self.policy_directory = '/etc/keystone/policies'
self.root_policy_directory = 'policy_root'
self.config_fixture.config(
group='moon',
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py
index 143e7c8b..49886d32 100644
--- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py
@@ -66,7 +66,7 @@ class TestIntraExtensionAdminManager(tests.TestCase):
def config_overrides(self):
super(TestIntraExtensionAdminManager, self).config_overrides()
- self.policy_directory = 'examples/moon/policies'
+ self.policy_directory = '/etc/keystone/policies'
self.config_fixture.config(
group='moon',
intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector')
diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py
index c2f60424..47b0df8f 100644
--- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py
+++ b/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py
@@ -66,7 +66,7 @@ class TestTenantManager(tests.TestCase):
self.config_fixture.config(
group='moon',
tenant_driver='keystone.contrib.moon.backends.sql.TenantConnector')
- self.policy_directory = 'examples/moon/policies'
+ self.policy_directory = '/etc/keystone/policies'
self.config_fixture.config(
group='moon',
intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector')
diff --git a/keystone-moon/keystone/tests/unit/__init__.py b/keystone-moon/keystone/tests/unit/__init__.py
index 52af8dfc..0e92ca65 100644
--- a/keystone-moon/keystone/tests/unit/__init__.py
+++ b/keystone-moon/keystone/tests/unit/__init__.py
@@ -13,6 +13,25 @@
# under the License.
import oslo_i18n
+import six
+
+
+if six.PY3:
+ # NOTE(dstanek): This block will monkey patch libraries that are not
+ # yet supported in Python3. We do this so that it is possible to
+ # execute any tests at all. Without monkey patching modules the
+ # tests will fail with import errors.
+
+ import sys
+ from unittest import mock # noqa: our import detection is naive?
+
+ sys.modules['ldap'] = mock.Mock()
+ sys.modules['ldap.controls'] = mock.Mock()
+ sys.modules['ldap.dn'] = mock.Mock()
+ sys.modules['ldap.filter'] = mock.Mock()
+ sys.modules['ldap.modlist'] = mock.Mock()
+ sys.modules['ldappool'] = mock.Mock()
+
# NOTE(dstanek): oslo_i18n.enable_lazy() must be called before
# keystone.i18n._() is called to ensure it has the desired lazy lookup
diff --git a/keystone-moon/keystone/tests/unit/assignment/__init__.py b/keystone-moon/keystone/tests/unit/assignment/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/assignment/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py b/keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py b/keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py
new file mode 100644
index 00000000..37e2d924
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py
@@ -0,0 +1,112 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone.common import sql
+from keystone import exception
+from keystone.tests import unit
+from keystone.tests.unit.assignment import test_core
+from keystone.tests.unit.backend import core_sql
+
+
+class SqlRoleModels(core_sql.BaseBackendSqlModels):
+
+ def test_role_model(self):
+ cols = (('id', sql.String, 64),
+ ('name', sql.String, 255),
+ ('domain_id', sql.String, 64))
+ self.assertExpectedSchema('role', cols)
+
+
+class SqlRole(core_sql.BaseBackendSqlTests, test_core.RoleTests):
+
+ def test_create_null_role_name(self):
+ role = unit.new_role_ref(name=None)
+ self.assertRaises(exception.UnexpectedError,
+ self.role_api.create_role,
+ role['id'],
+ role)
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ role['id'])
+
+ def test_create_duplicate_role_domain_specific_name_fails(self):
+ domain = unit.new_domain_ref()
+ role1 = unit.new_role_ref(domain_id=domain['id'])
+ self.role_api.create_role(role1['id'], role1)
+ role2 = unit.new_role_ref(name=role1['name'],
+ domain_id=domain['id'])
+ self.assertRaises(exception.Conflict,
+ self.role_api.create_role,
+ role2['id'],
+ role2)
+
+ def test_update_domain_id_of_role_fails(self):
+ # Create a global role
+ role1 = unit.new_role_ref()
+ role1 = self.role_api.create_role(role1['id'], role1)
+ # Try and update it to be domain specific
+ domainA = unit.new_domain_ref()
+ role1['domain_id'] = domainA['id']
+ self.assertRaises(exception.ValidationError,
+ self.role_api.update_role,
+ role1['id'],
+ role1)
+
+ # Create a domain specific role from scratch
+ role2 = unit.new_role_ref(domain_id=domainA['id'])
+ self.role_api.create_role(role2['id'], role2)
+ # Try to "move" it to another domain
+ domainB = unit.new_domain_ref()
+ role2['domain_id'] = domainB['id']
+ self.assertRaises(exception.ValidationError,
+ self.role_api.update_role,
+ role2['id'],
+ role2)
+ # Now try to make it global
+ role2['domain_id'] = None
+ self.assertRaises(exception.ValidationError,
+ self.role_api.update_role,
+ role2['id'],
+ role2)
+
+ def test_domain_specific_separation(self):
+ domain1 = unit.new_domain_ref()
+ role1 = unit.new_role_ref(domain_id=domain1['id'])
+ role_ref1 = self.role_api.create_role(role1['id'], role1)
+ self.assertDictEqual(role1, role_ref1)
+ # Check we can have the same named role in a different domain
+ domain2 = unit.new_domain_ref()
+ role2 = unit.new_role_ref(name=role1['name'], domain_id=domain2['id'])
+ role_ref2 = self.role_api.create_role(role2['id'], role2)
+ self.assertDictEqual(role2, role_ref2)
+ # ...and in fact that you can have the same named role as a global role
+ role3 = unit.new_role_ref(name=role1['name'])
+ role_ref3 = self.role_api.create_role(role3['id'], role3)
+ self.assertDictEqual(role3, role_ref3)
+ # Check that updating one doesn't change the others
+ role1['name'] = uuid.uuid4().hex
+ self.role_api.update_role(role1['id'], role1)
+ role_ref1 = self.role_api.get_role(role1['id'])
+ self.assertDictEqual(role1, role_ref1)
+ role_ref2 = self.role_api.get_role(role2['id'])
+ self.assertDictEqual(role2, role_ref2)
+ role_ref3 = self.role_api.get_role(role3['id'])
+ self.assertDictEqual(role3, role_ref3)
+ # Check that deleting one of these, doesn't affect the others
+ self.role_api.delete_role(role1['id'])
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ role1['id'])
+ self.role_api.get_role(role2['id'])
+ self.role_api.get_role(role3['id'])
diff --git a/keystone-moon/keystone/tests/unit/assignment/test_backends.py b/keystone-moon/keystone/tests/unit/assignment/test_backends.py
new file mode 100644
index 00000000..eb40e569
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/assignment/test_backends.py
@@ -0,0 +1,3755 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import mock
+from oslo_config import cfg
+from six.moves import range
+from testtools import matchers
+
+from keystone import exception
+from keystone.tests import unit
+
+
+CONF = cfg.CONF
+
+
+class AssignmentTestHelperMixin(object):
+ """Mixin class to aid testing of assignments.
+
+ This class supports data driven test plans that enable:
+
+ - Creation of initial entities, such as domains, users, groups, projects
+ and roles
+ - Creation of assignments referencing the above entities
+ - A set of input parameters and expected outputs to list_role_assignments
+ based on the above test data
+
+ A test plan is a dict of the form:
+
+ test_plan = {
+ entities: details and number of entities,
+ group_memberships: group-user entity memberships,
+ assignments: list of assignments to create,
+ tests: list of pairs of input params and expected outputs}
+
+ An example test plan:
+
+ test_plan = {
+ # First, create the entities required. Entities are specified by
+ # a dict with the key being the entity type and the value an
+ # entity specification which can be one of:
+ #
+ # - a simple number, e.g. {'users': 3} creates 3 users
+ # - a dict where more information regarding the contents of the entity
+ # is required, e.g. {'domains': {'users': 3}} creates a domain
+ # with three users
+ # - a list of entity specifications if multiple are required
+ #
+ # The following creates a domain that contains a single user, group and
+ # project, as well as creating three roles.
+
+ 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1},
+ 'roles': 3},
+
+ # If it is required that an existing domain be used for the new
+ # entities, then the id of that domain can be included in the
+ # domain dict. For example, if alternatively we wanted to add 3 users
+ # to the default domain, add a second domain containing 3 projects as
+ # well as 5 additional empty domains, the entities would be defined as:
+ #
+ # 'entities': {'domains': [{'id': DEFAULT_DOMAIN, 'users': 3},
+ # {'projects': 3}, 5]},
+ #
+ # A project hierarchy can be specified within the 'projects' section by
+ # nesting the 'project' key, for example to create a project with three
+ # sub-projects you would use:
+
+ 'projects': {'project': 3}
+
+ # A more complex hierarchy can also be defined, for example the
+ # following would define three projects each containing a
+ # sub-project, each of which contain a further three sub-projects.
+
+ 'projects': [{'project': {'project': 3}},
+ {'project': {'project': 3}},
+ {'project': {'project': 3}}]
+
+ # If the 'roles' entity count is defined as top level key in 'entities'
+ # dict then these are global roles. If it is placed within the
+ # 'domain' dict, then they will be domain specific roles. A mix of
+ # domain specific and global roles are allowed, with the role index
+ # being calculated in the order they are defined in the 'entities'
+ # dict.
+
+ # A set of implied role specifications. In this case, prior role
+ # index 0 implies role index 1, and role 1 implies roles 2 and 3.
+
+ 'roles': [{'role': 0, 'implied_roles': [1]},
+ {'role': 1, 'implied_roles': [2, 3]}]
+
+ # A list of groups and their members. In this case make users with
+ # index 0 and 1 members of group with index 0. Users and Groups are
+ # indexed in the order they appear in the 'entities' key above.
+
+ 'group_memberships': [{'group': 0, 'users': [0, 1]}]
+
+ # Next, create assignments between the entities, referencing the
+ # entities by index, i.e. 'user': 0 refers to user[0]. Entities are
+ # indexed in the order they appear in the 'entities' key above within
+ # their entity type.
+
+ 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'group': 0, 'role': 2, 'domain': 0},
+ {'user': 0, 'role': 2, 'project': 0}],
+
+ # Finally, define an array of tests where list_role_assignment() is
+ # called with the given input parameters and the results are then
+ # confirmed to be as given in 'results'. Again, all entities are
+ # referenced by index.
+
+ 'tests': [
+ {'params': {},
+ 'results': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'group': 0, 'role': 2, 'domain': 0},
+ {'user': 0, 'role': 2, 'project': 0}]},
+ {'params': {'role': 2},
+ 'results': [{'group': 0, 'role': 2, 'domain': 0},
+ {'user': 0, 'role': 2, 'project': 0}]}]
+
+ # The 'params' key also supports the 'effective',
+ # 'inherited_to_projects' and 'source_from_group_ids' options to
+ # list_role_assignments.}
+
+ """
+
+ def _handle_project_spec(self, test_data, domain_id, project_spec,
+ parent_id=None):
+ """Handle the creation of a project or hierarchy of projects.
+
+ project_spec may either be a count of the number of projects to
+ create, or it may be a list of the form:
+
+ [{'project': project_spec}, {'project': project_spec}, ...]
+
+ This method is called recursively to handle the creation of a
+ hierarchy of projects.
+
+ """
+ def _create_project(domain_id, parent_id):
+ new_project = unit.new_project_ref(domain_id=domain_id,
+ parent_id=parent_id)
+ new_project = self.resource_api.create_project(new_project['id'],
+ new_project)
+ return new_project
+
+ if isinstance(project_spec, list):
+ for this_spec in project_spec:
+ self._handle_project_spec(
+ test_data, domain_id, this_spec, parent_id=parent_id)
+ elif isinstance(project_spec, dict):
+ new_proj = _create_project(domain_id, parent_id)
+ test_data['projects'].append(new_proj)
+ self._handle_project_spec(
+ test_data, domain_id, project_spec['project'],
+ parent_id=new_proj['id'])
+ else:
+ for _ in range(project_spec):
+ test_data['projects'].append(
+ _create_project(domain_id, parent_id))
+
+ def _create_role(self, domain_id=None):
+ new_role = unit.new_role_ref(domain_id=domain_id)
+ return self.role_api.create_role(new_role['id'], new_role)
+
+ def _handle_domain_spec(self, test_data, domain_spec):
+ """Handle the creation of domains and their contents.
+
+ domain_spec may either be a count of the number of empty domains to
+ create, a dict describing the domain contents, or a list of
+ domain_specs.
+
+ In the case when a list is provided, this method calls itself
+ recursively to handle the list elements.
+
+ This method will insert any entities created into test_data
+
+ """
+ def _create_domain(domain_id=None):
+ if domain_id is None:
+ new_domain = unit.new_domain_ref()
+ self.resource_api.create_domain(new_domain['id'],
+ new_domain)
+ return new_domain
+ else:
+ # The test plan specified an existing domain to use
+ return self.resource_api.get_domain(domain_id)
+
+ def _create_entity_in_domain(entity_type, domain_id):
+ """Create a user or group entity in the domain."""
+ if entity_type == 'users':
+ new_entity = unit.new_user_ref(domain_id=domain_id)
+ new_entity = self.identity_api.create_user(new_entity)
+ elif entity_type == 'groups':
+ new_entity = unit.new_group_ref(domain_id=domain_id)
+ new_entity = self.identity_api.create_group(new_entity)
+ elif entity_type == 'roles':
+ new_entity = self._create_role(domain_id=domain_id)
+ else:
+ # Must be a bad test plan
+ raise exception.NotImplemented()
+ return new_entity
+
+ if isinstance(domain_spec, list):
+ for x in domain_spec:
+ self._handle_domain_spec(test_data, x)
+ elif isinstance(domain_spec, dict):
+ # If there is a domain ID specified, then use it
+ the_domain = _create_domain(domain_spec.get('id'))
+ test_data['domains'].append(the_domain)
+ for entity_type, value in domain_spec.items():
+ if entity_type == 'id':
+ # We already used this above to determine whether to
+ # use an existing domain
+ continue
+ if entity_type == 'projects':
+ # If it's projects, we need to handle the potential
+ # specification of a project hierarchy
+ self._handle_project_spec(
+ test_data, the_domain['id'], value)
+ else:
+ # It's a count of number of entities
+ for _ in range(value):
+ test_data[entity_type].append(
+ _create_entity_in_domain(
+ entity_type, the_domain['id']))
+ else:
+ for _ in range(domain_spec):
+ test_data['domains'].append(_create_domain())
+
+ def create_entities(self, entity_pattern):
+ """Create the entities specified in the test plan.
+
+ Process the 'entities' key in the test plan, creating the requested
+ entities. Each created entity will be added to the array of entities
+ stored in the returned test_data object, e.g.:
+
+ test_data['users'] = [user[0], user[1]....]
+
+ """
+ test_data = {}
+ for entity in ['users', 'groups', 'domains', 'projects', 'roles']:
+ test_data[entity] = []
+
+ # Create any domains requested and, if specified, any entities within
+ # those domains
+ if 'domains' in entity_pattern:
+ self._handle_domain_spec(test_data, entity_pattern['domains'])
+
+ # Create any roles requested
+ if 'roles' in entity_pattern:
+ for _ in range(entity_pattern['roles']):
+ test_data['roles'].append(self._create_role())
+
+ return test_data
+
+ def _convert_entity_shorthand(self, key, shorthand_data, reference_data):
+ """Convert a shorthand entity description into a full ID reference.
+
+ In test plan definitions, we allow a shorthand for referencing to an
+ entity of the form:
+
+ 'user': 0
+
+ which is actually shorthand for:
+
+ 'user_id': reference_data['users'][0]['id']
+
+ This method converts the shorthand version into the full reference.
+
+ """
+ expanded_key = '%s_id' % key
+ reference_index = '%ss' % key
+ index_value = (
+ reference_data[reference_index][shorthand_data[key]]['id'])
+ return expanded_key, index_value
+
+ def create_implied_roles(self, implied_pattern, test_data):
+ """Create the implied roles specified in the test plan."""
+ for implied_spec in implied_pattern:
+ # Each implied role specification is a dict of the form:
+ #
+ # {'role': 0, 'implied_roles': list of roles}
+
+ prior_role = test_data['roles'][implied_spec['role']]['id']
+ if isinstance(implied_spec['implied_roles'], list):
+ for this_role in implied_spec['implied_roles']:
+ implied_role = test_data['roles'][this_role]['id']
+ self.role_api.create_implied_role(prior_role, implied_role)
+ else:
+ implied_role = (
+ test_data['roles'][implied_spec['implied_roles']]['id'])
+ self.role_api.create_implied_role(prior_role, implied_role)
+
+ def create_group_memberships(self, group_pattern, test_data):
+ """Create the group memberships specified in the test plan."""
+ for group_spec in group_pattern:
+ # Each membership specification is a dict of the form:
+ #
+ # {'group': 0, 'users': [list of user indexes]}
+ #
+ # Add all users in the list to the specified group, first
+ # converting from index to full entity ID.
+ group_value = test_data['groups'][group_spec['group']]['id']
+ for user_index in group_spec['users']:
+ user_value = test_data['users'][user_index]['id']
+ self.identity_api.add_user_to_group(user_value, group_value)
+ return test_data
+
+ def create_assignments(self, assignment_pattern, test_data):
+ """Create the assignments specified in the test plan."""
+ # First store how many assignments are already in the system,
+ # so during the tests we can check the number of new assignments
+ # created.
+ test_data['initial_assignment_count'] = (
+ len(self.assignment_api.list_role_assignments()))
+
+ # Now create the new assignments in the test plan
+ for assignment in assignment_pattern:
+ # Each assignment is a dict of the form:
+ #
+ # { 'user': 0, 'project':1, 'role': 6}
+ #
+ # where the value of each item is the index into the array of
+ # entities created earlier.
+ #
+ # We process the assignment dict to create the args required to
+ # make the create_grant() call.
+ args = {}
+ for param in assignment:
+ if param == 'inherited_to_projects':
+ args[param] = assignment[param]
+ else:
+ # Turn 'entity : 0' into 'entity_id = ac6736ba873d'
+ # where entity in user, group, project or domain
+ key, value = self._convert_entity_shorthand(
+ param, assignment, test_data)
+ args[key] = value
+ self.assignment_api.create_grant(**args)
+ return test_data
+
+ def execute_assignment_cases(self, test_plan, test_data):
+ """Execute the test plan, based on the created test_data."""
+ def check_results(expected, actual, param_arg_count):
+ if param_arg_count == 0:
+ # It was an unfiltered call, so default fixture assignments
+ # might be polluting our answer - so we take into account
+ # how many assignments there were before the test.
+ self.assertEqual(
+ len(expected) + test_data['initial_assignment_count'],
+ len(actual))
+ else:
+ self.assertThat(actual, matchers.HasLength(len(expected)))
+
+ for each_expected in expected:
+ expected_assignment = {}
+ for param in each_expected:
+ if param == 'inherited_to_projects':
+ expected_assignment[param] = each_expected[param]
+ elif param == 'indirect':
+ # We're expecting the result to contain an indirect
+ # dict with the details how the role came to be placed
+ # on this entity - so convert the key/value pairs of
+ # that dict into real entity references.
+ indirect_term = {}
+ for indirect_param in each_expected[param]:
+ key, value = self._convert_entity_shorthand(
+ indirect_param, each_expected[param],
+ test_data)
+ indirect_term[key] = value
+ expected_assignment[param] = indirect_term
+ else:
+ # Convert a simple shorthand entry into a full
+ # entity reference
+ key, value = self._convert_entity_shorthand(
+ param, each_expected, test_data)
+ expected_assignment[key] = value
+ self.assertIn(expected_assignment, actual)
+
+ def convert_group_ids_sourced_from_list(index_list, reference_data):
+ value_list = []
+ for group_index in index_list:
+ value_list.append(
+ reference_data['groups'][group_index]['id'])
+ return value_list
+
+ # Go through each test in the array, processing the input params, which
+ # we build into an args dict, and then call list_role_assignments. Then
+ # check the results against those specified in the test plan.
+ for test in test_plan.get('tests', []):
+ args = {}
+ for param in test['params']:
+ if param in ['effective', 'inherited', 'include_subtree']:
+ # Just pass the value into the args
+ args[param] = test['params'][param]
+ elif param == 'source_from_group_ids':
+ # Convert the list of indexes into a list of IDs
+ args[param] = convert_group_ids_sourced_from_list(
+ test['params']['source_from_group_ids'], test_data)
+ else:
+ # Turn 'entity : 0' into 'entity_id = ac6736ba873d'
+ # where entity in user, group, project or domain
+ key, value = self._convert_entity_shorthand(
+ param, test['params'], test_data)
+ args[key] = value
+ results = self.assignment_api.list_role_assignments(**args)
+ check_results(test['results'], results, len(args))
+
+ def execute_assignment_plan(self, test_plan):
+ """Create entities, assignments and execute the test plan.
+
+ The standard method to call to create entities and assignments and
+ execute the tests as specified in the test_plan. The test_data
+ dict is returned so that, if required, the caller can execute
+ additional manual tests with the entities and assignments created.
+
+ """
+ test_data = self.create_entities(test_plan['entities'])
+ if 'implied_roles' in test_plan:
+ self.create_implied_roles(test_plan['implied_roles'], test_data)
+ if 'group_memberships' in test_plan:
+ self.create_group_memberships(test_plan['group_memberships'],
+ test_data)
+ if 'assignments' in test_plan:
+ test_data = self.create_assignments(test_plan['assignments'],
+ test_data)
+ self.execute_assignment_cases(test_plan, test_data)
+ return test_data
+
+
+class AssignmentTests(AssignmentTestHelperMixin):
+
+ def _get_domain_fixture(self):
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ return domain
+
+ def test_project_add_and_remove_user_role(self):
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ self.tenant_bar['id'])
+ self.assertNotIn(self.user_two['id'], user_ids)
+
+ self.assignment_api.add_role_to_user_and_project(
+ tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=self.role_other['id'])
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ self.tenant_bar['id'])
+ self.assertIn(self.user_two['id'], user_ids)
+
+ self.assignment_api.remove_role_from_user_and_project(
+ tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=self.role_other['id'])
+
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ self.tenant_bar['id'])
+ self.assertNotIn(self.user_two['id'], user_ids)
+
+ def test_remove_user_role_not_assigned(self):
+ # Expect failure if attempt to remove a role that was never assigned to
+ # the user.
+ self.assertRaises(exception.RoleNotFound,
+ self.assignment_api.
+ remove_role_from_user_and_project,
+ tenant_id=self.tenant_bar['id'],
+ user_id=self.user_two['id'],
+ role_id=self.role_other['id'])
+
+ def test_list_user_ids_for_project(self):
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ self.tenant_baz['id'])
+ self.assertEqual(2, len(user_ids))
+ self.assertIn(self.user_two['id'], user_ids)
+ self.assertIn(self.user_badguy['id'], user_ids)
+
+ def test_list_user_ids_for_project_no_duplicates(self):
+ # Create user
+ user_ref = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user_ref = self.identity_api.create_user(user_ref)
+ # Create project
+ project_ref = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(
+ project_ref['id'], project_ref)
+ # Create 2 roles and give user each role in project
+ for i in range(2):
+ role_ref = unit.new_role_ref()
+ self.role_api.create_role(role_ref['id'], role_ref)
+ self.assignment_api.add_role_to_user_and_project(
+ user_id=user_ref['id'],
+ tenant_id=project_ref['id'],
+ role_id=role_ref['id'])
+ # Get the list of user_ids in project
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ project_ref['id'])
+ # Ensure the user is only returned once
+ self.assertEqual(1, len(user_ids))
+
+ def test_get_project_user_ids_returns_not_found(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.assignment_api.list_user_ids_for_project,
+ uuid.uuid4().hex)
+
+ def test_list_role_assignments_unfiltered(self):
+ """Test unfiltered listing of role assignments."""
+ test_plan = {
+ # Create a domain, with a user, group & project
+ 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1},
+ 'roles': 3},
+ # Create a grant of each type (user/group on project/domain)
+ 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'group': 0, 'role': 2, 'domain': 0},
+ {'group': 0, 'role': 2, 'project': 0}],
+ 'tests': [
+ # Check that we get back the 4 assignments
+ {'params': {},
+ 'results': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'group': 0, 'role': 2, 'domain': 0},
+ {'group': 0, 'role': 2, 'project': 0}]}
+ ]
+ }
+ self.execute_assignment_plan(test_plan)
+
+ def test_list_role_assignments_filtered_by_role(self):
+ """Test listing of role assignments filtered by role ID."""
+ test_plan = {
+ # Create a user, group & project in the default domain
+ 'entities': {'domains': {'id': CONF.identity.default_domain_id,
+ 'users': 1, 'groups': 1, 'projects': 1},
+ 'roles': 3},
+ # Create a grant of each type (user/group on project/domain)
+ 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'group': 0, 'role': 2, 'domain': 0},
+ {'group': 0, 'role': 2, 'project': 0}],
+ 'tests': [
+ # Check that when filtering by role, we only get back those
+ # that match
+ {'params': {'role': 2},
+ 'results': [{'group': 0, 'role': 2, 'domain': 0},
+ {'group': 0, 'role': 2, 'project': 0}]}
+ ]
+ }
+ self.execute_assignment_plan(test_plan)
+
+ def test_list_group_role_assignment(self):
+ # When a group role assignment is created and the role assignments are
+ # listed then the group role assignment is included in the list.
+
+ test_plan = {
+ 'entities': {'domains': {'id': CONF.identity.default_domain_id,
+ 'groups': 1, 'projects': 1},
+ 'roles': 1},
+ 'assignments': [{'group': 0, 'role': 0, 'project': 0}],
+ 'tests': [
+ {'params': {},
+ 'results': [{'group': 0, 'role': 0, 'project': 0}]}
+ ]
+ }
+ self.execute_assignment_plan(test_plan)
+
+ def test_list_role_assignments_bad_role(self):
+ assignment_list = self.assignment_api.list_role_assignments(
+ role_id=uuid.uuid4().hex)
+ self.assertEqual([], assignment_list)
+
+ def test_add_duplicate_role_grant(self):
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn(self.role_admin['id'], roles_ref)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
+ self.assertRaises(exception.Conflict,
+ self.assignment_api.add_role_to_user_and_project,
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ self.role_admin['id'])
+
+ def test_get_role_by_user_and_project_with_user_in_group(self):
+ """Test for get role by user and project, user was added into a group.
+
+ Test Plan:
+
+ - Create a user, a project & a group, add this user to group
+ - Create roles and grant them to user and project
+ - Check the role list get by the user and project was as expected
+
+ """
+ user_ref = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user_ref = self.identity_api.create_user(user_ref)
+
+ project_ref = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(project_ref['id'], project_ref)
+
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group_id = self.identity_api.create_group(group)['id']
+ self.identity_api.add_user_to_group(user_ref['id'], group_id)
+
+ role_ref_list = []
+ for i in range(2):
+ role_ref = unit.new_role_ref()
+ self.role_api.create_role(role_ref['id'], role_ref)
+ role_ref_list.append(role_ref)
+
+ self.assignment_api.add_role_to_user_and_project(
+ user_id=user_ref['id'],
+ tenant_id=project_ref['id'],
+ role_id=role_ref['id'])
+
+ role_list = self.assignment_api.get_roles_for_user_and_project(
+ user_ref['id'],
+ project_ref['id'])
+
+ self.assertEqual(set([r['id'] for r in role_ref_list]),
+ set(role_list))
+
+ def test_get_role_by_user_and_project(self):
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertNotIn(self.role_admin['id'], roles_ref)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id'])
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertIn(self.role_admin['id'], roles_ref)
+ self.assertNotIn('member', roles_ref)
+
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'], 'member')
+ roles_ref = self.assignment_api.get_roles_for_user_and_project(
+ self.user_foo['id'], self.tenant_bar['id'])
+ self.assertIn(self.role_admin['id'], roles_ref)
+ self.assertIn('member', roles_ref)
+
+ def test_get_roles_for_user_and_domain(self):
+ """Test for getting roles for user on a domain.
+
+ Test Plan:
+
+ - Create a domain, with 2 users
+ - Check no roles yet exist
+ - Give user1 two roles on the domain, user2 one role
+ - Get roles on user1 and the domain - make sure we only
+ get back the 2 roles on user1
+ - Delete both roles from user1
+ - Check we get no roles back for user1 on domain
+
+ """
+ new_domain = unit.new_domain_ref()
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ new_user1 = unit.new_user_ref(domain_id=new_domain['id'])
+ new_user1 = self.identity_api.create_user(new_user1)
+ new_user2 = unit.new_user_ref(domain_id=new_domain['id'])
+ new_user2 = self.identity_api.create_user(new_user2)
+ roles_ref = self.assignment_api.list_grants(
+ user_id=new_user1['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+ # Now create the grants (roles are defined in default_fixtures)
+ self.assignment_api.create_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ self.assignment_api.create_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='other')
+ self.assignment_api.create_grant(user_id=new_user2['id'],
+ domain_id=new_domain['id'],
+ role_id='admin')
+ # Read back the roles for user1 on domain
+ roles_ids = self.assignment_api.get_roles_for_user_and_domain(
+ new_user1['id'], new_domain['id'])
+ self.assertEqual(2, len(roles_ids))
+ self.assertIn(self.role_member['id'], roles_ids)
+ self.assertIn(self.role_other['id'], roles_ids)
+
+ # Now delete both grants for user1
+ self.assignment_api.delete_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ self.assignment_api.delete_grant(user_id=new_user1['id'],
+ domain_id=new_domain['id'],
+ role_id='other')
+ roles_ref = self.assignment_api.list_grants(
+ user_id=new_user1['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+
+ def test_get_roles_for_user_and_domain_returns_not_found(self):
+ """Test errors raised when getting roles for user on a domain.
+
+ Test Plan:
+
+ - Check non-existing user gives UserNotFound
+ - Check non-existing domain gives DomainNotFound
+
+ """
+ new_domain = self._get_domain_fixture()
+ new_user1 = unit.new_user_ref(domain_id=new_domain['id'])
+ new_user1 = self.identity_api.create_user(new_user1)
+
+ self.assertRaises(exception.UserNotFound,
+ self.assignment_api.get_roles_for_user_and_domain,
+ uuid.uuid4().hex,
+ new_domain['id'])
+
+ self.assertRaises(exception.DomainNotFound,
+ self.assignment_api.get_roles_for_user_and_domain,
+ new_user1['id'],
+ uuid.uuid4().hex)
+
+ def test_get_roles_for_user_and_project_returns_not_found(self):
+ self.assertRaises(exception.UserNotFound,
+ self.assignment_api.get_roles_for_user_and_project,
+ uuid.uuid4().hex,
+ self.tenant_bar['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.assignment_api.get_roles_for_user_and_project,
+ self.user_foo['id'],
+ uuid.uuid4().hex)
+
+ def test_add_role_to_user_and_project_returns_not_found(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.assignment_api.add_role_to_user_and_project,
+ self.user_foo['id'],
+ uuid.uuid4().hex,
+ self.role_admin['id'])
+
+ self.assertRaises(exception.RoleNotFound,
+ self.assignment_api.add_role_to_user_and_project,
+ self.user_foo['id'],
+ self.tenant_bar['id'],
+ uuid.uuid4().hex)
+
+ def test_add_role_to_user_and_project_no_user(self):
+ # If add_role_to_user_and_project is called and the user doesn't exist, then
+ # no error.
+ user_id_not_exist = uuid.uuid4().hex
+ self.assignment_api.add_role_to_user_and_project(
+ user_id_not_exist, self.tenant_bar['id'], self.role_admin['id'])
+
def test_remove_role_from_user_and_project(self):
    """Removing an assigned role succeeds once, then raises NotFound."""
    self.assignment_api.add_role_to_user_and_project(
        self.user_foo['id'], self.tenant_bar['id'], 'member')
    self.assignment_api.remove_role_from_user_and_project(
        self.user_foo['id'], self.tenant_bar['id'], 'member')
    roles_ref = self.assignment_api.get_roles_for_user_and_project(
        self.user_foo['id'], self.tenant_bar['id'])
    self.assertNotIn('member', roles_ref)
    # Removing the same role a second time must fail.
    self.assertRaises(exception.NotFound,
                      self.assignment_api.
                      remove_role_from_user_and_project,
                      self.user_foo['id'],
                      self.tenant_bar['id'],
                      'member')
+
def test_get_role_grant_by_user_and_project(self):
    """list_grants reflects newly created user/project grants."""
    grants = self.assignment_api.list_grants(
        user_id=self.user_foo['id'],
        project_id=self.tenant_bar['id'])
    self.assertEqual(1, len(grants))

    self.assignment_api.create_grant(user_id=self.user_foo['id'],
                                     project_id=self.tenant_bar['id'],
                                     role_id=self.role_admin['id'])
    grants = self.assignment_api.list_grants(
        user_id=self.user_foo['id'],
        project_id=self.tenant_bar['id'])
    self.assertIn(self.role_admin['id'], [grant['id'] for grant in grants])

    self.assignment_api.create_grant(user_id=self.user_foo['id'],
                                     project_id=self.tenant_bar['id'],
                                     role_id='member')
    grants = self.assignment_api.list_grants(
        user_id=self.user_foo['id'],
        project_id=self.tenant_bar['id'])

    # Both the admin and member grants must now be listed.
    grant_ids = [grant['id'] for grant in grants]
    self.assertIn(self.role_admin['id'], grant_ids)
    self.assertIn('member', grant_ids)
+
def test_remove_role_grant_from_user_and_project(self):
    """Deleting a user's project grant removes it; re-delete raises."""
    self.assignment_api.create_grant(user_id=self.user_foo['id'],
                                     project_id=self.tenant_baz['id'],
                                     role_id='member')
    roles_ref = self.assignment_api.list_grants(
        user_id=self.user_foo['id'],
        project_id=self.tenant_baz['id'])
    self.assertDictEqual(self.role_member, roles_ref[0])

    self.assignment_api.delete_grant(user_id=self.user_foo['id'],
                                     project_id=self.tenant_baz['id'],
                                     role_id='member')
    roles_ref = self.assignment_api.list_grants(
        user_id=self.user_foo['id'],
        project_id=self.tenant_baz['id'])
    self.assertEqual(0, len(roles_ref))
    # Deleting an already-removed grant must raise.
    self.assertRaises(exception.RoleAssignmentNotFound,
                      self.assignment_api.delete_grant,
                      user_id=self.user_foo['id'],
                      project_id=self.tenant_baz['id'],
                      role_id='member')
+
def test_get_role_assignment_by_project_not_found(self):
    """check_grant_role_id raises when no matching project grant exists."""
    check_grant = self.assignment_api.check_grant_role_id
    self.assertRaises(exception.RoleAssignmentNotFound,
                      check_grant,
                      user_id=self.user_foo['id'],
                      project_id=self.tenant_baz['id'],
                      role_id='member')
    self.assertRaises(exception.RoleAssignmentNotFound,
                      check_grant,
                      group_id=uuid.uuid4().hex,
                      project_id=self.tenant_baz['id'],
                      role_id='member')
+
def test_get_role_assignment_by_domain_not_found(self):
    """check_grant_role_id raises when no matching domain grant exists."""
    check_grant = self.assignment_api.check_grant_role_id
    self.assertRaises(exception.RoleAssignmentNotFound,
                      check_grant,
                      user_id=self.user_foo['id'],
                      domain_id=self.domain_default['id'],
                      role_id='member')
    self.assertRaises(exception.RoleAssignmentNotFound,
                      check_grant,
                      group_id=uuid.uuid4().hex,
                      domain_id=self.domain_default['id'],
                      role_id='member')
+
def test_del_role_assignment_by_project_not_found(self):
    """delete_grant raises when no matching project grant exists."""
    delete_grant = self.assignment_api.delete_grant
    self.assertRaises(exception.RoleAssignmentNotFound,
                      delete_grant,
                      user_id=self.user_foo['id'],
                      project_id=self.tenant_baz['id'],
                      role_id='member')
    self.assertRaises(exception.RoleAssignmentNotFound,
                      delete_grant,
                      group_id=uuid.uuid4().hex,
                      project_id=self.tenant_baz['id'],
                      role_id='member')
+
def test_del_role_assignment_by_domain_not_found(self):
    """delete_grant raises when no matching domain grant exists."""
    delete_grant = self.assignment_api.delete_grant
    self.assertRaises(exception.RoleAssignmentNotFound,
                      delete_grant,
                      user_id=self.user_foo['id'],
                      domain_id=self.domain_default['id'],
                      role_id='member')
    self.assertRaises(exception.RoleAssignmentNotFound,
                      delete_grant,
                      group_id=uuid.uuid4().hex,
                      domain_id=self.domain_default['id'],
                      role_id='member')
+
def test_get_and_remove_role_grant_by_group_and_project(self):
    """Group grants on a project can be created, listed and deleted."""
    new_domain = unit.new_domain_ref()
    self.resource_api.create_domain(new_domain['id'], new_domain)
    new_group = unit.new_group_ref(domain_id=new_domain['id'])
    new_group = self.identity_api.create_group(new_group)
    new_user = unit.new_user_ref(domain_id=new_domain['id'])
    new_user = self.identity_api.create_user(new_user)
    self.identity_api.add_user_to_group(new_user['id'],
                                        new_group['id'])
    # A fresh group starts with no grants on the project.
    roles_ref = self.assignment_api.list_grants(
        group_id=new_group['id'],
        project_id=self.tenant_bar['id'])
    self.assertEqual(0, len(roles_ref))
    self.assignment_api.create_grant(group_id=new_group['id'],
                                     project_id=self.tenant_bar['id'],
                                     role_id='member')
    roles_ref = self.assignment_api.list_grants(
        group_id=new_group['id'],
        project_id=self.tenant_bar['id'])
    self.assertDictEqual(self.role_member, roles_ref[0])

    self.assignment_api.delete_grant(group_id=new_group['id'],
                                     project_id=self.tenant_bar['id'],
                                     role_id='member')
    roles_ref = self.assignment_api.list_grants(
        group_id=new_group['id'],
        project_id=self.tenant_bar['id'])
    self.assertEqual(0, len(roles_ref))
    # Deleting the removed grant again must raise.
    self.assertRaises(exception.RoleAssignmentNotFound,
                      self.assignment_api.delete_grant,
                      group_id=new_group['id'],
                      project_id=self.tenant_bar['id'],
                      role_id='member')
+
def test_get_and_remove_role_grant_by_group_and_domain(self):
    """Group grants on a domain can be created, listed and deleted."""
    new_domain = unit.new_domain_ref()
    self.resource_api.create_domain(new_domain['id'], new_domain)
    new_group = unit.new_group_ref(domain_id=new_domain['id'])
    new_group = self.identity_api.create_group(new_group)
    new_user = unit.new_user_ref(domain_id=new_domain['id'])
    new_user = self.identity_api.create_user(new_user)
    self.identity_api.add_user_to_group(new_user['id'],
                                        new_group['id'])

    # A fresh group starts with no grants on the domain.
    roles_ref = self.assignment_api.list_grants(
        group_id=new_group['id'],
        domain_id=new_domain['id'])
    self.assertEqual(0, len(roles_ref))

    self.assignment_api.create_grant(group_id=new_group['id'],
                                     domain_id=new_domain['id'],
                                     role_id='member')

    roles_ref = self.assignment_api.list_grants(
        group_id=new_group['id'],
        domain_id=new_domain['id'])
    self.assertDictEqual(self.role_member, roles_ref[0])

    self.assignment_api.delete_grant(group_id=new_group['id'],
                                     domain_id=new_domain['id'],
                                     role_id='member')
    roles_ref = self.assignment_api.list_grants(
        group_id=new_group['id'],
        domain_id=new_domain['id'])
    self.assertEqual(0, len(roles_ref))
    # Deleting the removed grant again must raise.
    self.assertRaises(exception.RoleAssignmentNotFound,
                      self.assignment_api.delete_grant,
                      group_id=new_group['id'],
                      domain_id=new_domain['id'],
                      role_id='member')
+
def test_get_and_remove_correct_role_grant_from_a_mix(self):
    """delete_grant removes only the targeted grant from a mixed set."""
    new_domain = unit.new_domain_ref()
    self.resource_api.create_domain(new_domain['id'], new_domain)
    new_project = unit.new_project_ref(domain_id=new_domain['id'])
    self.resource_api.create_project(new_project['id'], new_project)
    new_group = unit.new_group_ref(domain_id=new_domain['id'])
    new_group = self.identity_api.create_group(new_group)
    new_group2 = unit.new_group_ref(domain_id=new_domain['id'])
    new_group2 = self.identity_api.create_group(new_group2)
    new_user = unit.new_user_ref(domain_id=new_domain['id'])
    new_user = self.identity_api.create_user(new_user)
    new_user2 = unit.new_user_ref(domain_id=new_domain['id'])
    new_user2 = self.identity_api.create_user(new_user2)
    self.identity_api.add_user_to_group(new_user['id'],
                                        new_group['id'])
    # First check we have no grants
    roles_ref = self.assignment_api.list_grants(
        group_id=new_group['id'],
        domain_id=new_domain['id'])
    self.assertEqual(0, len(roles_ref))
    # Now add the grant we are going to test for, and some others as
    # well just to make sure we get back the right one
    self.assignment_api.create_grant(group_id=new_group['id'],
                                     domain_id=new_domain['id'],
                                     role_id='member')

    self.assignment_api.create_grant(group_id=new_group2['id'],
                                     domain_id=new_domain['id'],
                                     role_id=self.role_admin['id'])
    self.assignment_api.create_grant(user_id=new_user2['id'],
                                     domain_id=new_domain['id'],
                                     role_id=self.role_admin['id'])
    self.assignment_api.create_grant(group_id=new_group['id'],
                                     project_id=new_project['id'],
                                     role_id=self.role_admin['id'])

    # Only the 'member' grant for new_group on the domain should match.
    roles_ref = self.assignment_api.list_grants(
        group_id=new_group['id'],
        domain_id=new_domain['id'])
    self.assertDictEqual(self.role_member, roles_ref[0])

    self.assignment_api.delete_grant(group_id=new_group['id'],
                                     domain_id=new_domain['id'],
                                     role_id='member')
    roles_ref = self.assignment_api.list_grants(
        group_id=new_group['id'],
        domain_id=new_domain['id'])
    self.assertEqual(0, len(roles_ref))
    # Deleting the already-removed grant must raise.
    self.assertRaises(exception.RoleAssignmentNotFound,
                      self.assignment_api.delete_grant,
                      group_id=new_group['id'],
                      domain_id=new_domain['id'],
                      role_id='member')
+
def test_get_and_remove_role_grant_by_user_and_domain(self):
    """User grants on a domain can be created, listed and deleted."""
    new_domain = unit.new_domain_ref()
    self.resource_api.create_domain(new_domain['id'], new_domain)
    new_user = unit.new_user_ref(domain_id=new_domain['id'])
    new_user = self.identity_api.create_user(new_user)
    # A fresh user starts with no grants on the domain.
    roles_ref = self.assignment_api.list_grants(
        user_id=new_user['id'],
        domain_id=new_domain['id'])
    self.assertEqual(0, len(roles_ref))
    self.assignment_api.create_grant(user_id=new_user['id'],
                                     domain_id=new_domain['id'],
                                     role_id='member')
    roles_ref = self.assignment_api.list_grants(
        user_id=new_user['id'],
        domain_id=new_domain['id'])
    self.assertDictEqual(self.role_member, roles_ref[0])

    self.assignment_api.delete_grant(user_id=new_user['id'],
                                     domain_id=new_domain['id'],
                                     role_id='member')
    roles_ref = self.assignment_api.list_grants(
        user_id=new_user['id'],
        domain_id=new_domain['id'])
    self.assertEqual(0, len(roles_ref))
    # Deleting the removed grant again must raise.
    self.assertRaises(exception.RoleAssignmentNotFound,
                      self.assignment_api.delete_grant,
                      user_id=new_user['id'],
                      domain_id=new_domain['id'],
                      role_id='member')
+
def test_get_and_remove_role_grant_by_group_and_cross_domain(self):
    """A group can hold distinct roles in two different domains."""
    group1_domain1_role = unit.new_role_ref()
    self.role_api.create_role(group1_domain1_role['id'],
                              group1_domain1_role)
    group1_domain2_role = unit.new_role_ref()
    self.role_api.create_role(group1_domain2_role['id'],
                              group1_domain2_role)
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    domain2 = unit.new_domain_ref()
    self.resource_api.create_domain(domain2['id'], domain2)
    group1 = unit.new_group_ref(domain_id=domain1['id'])
    group1 = self.identity_api.create_group(group1)
    # No grants exist yet in either domain.
    roles_ref = self.assignment_api.list_grants(
        group_id=group1['id'],
        domain_id=domain1['id'])
    self.assertEqual(0, len(roles_ref))
    roles_ref = self.assignment_api.list_grants(
        group_id=group1['id'],
        domain_id=domain2['id'])
    self.assertEqual(0, len(roles_ref))
    self.assignment_api.create_grant(group_id=group1['id'],
                                     domain_id=domain1['id'],
                                     role_id=group1_domain1_role['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     domain_id=domain2['id'],
                                     role_id=group1_domain2_role['id'])
    # Each domain reports only its own grant.
    roles_ref = self.assignment_api.list_grants(
        group_id=group1['id'],
        domain_id=domain1['id'])
    self.assertDictEqual(group1_domain1_role, roles_ref[0])
    roles_ref = self.assignment_api.list_grants(
        group_id=group1['id'],
        domain_id=domain2['id'])
    self.assertDictEqual(group1_domain2_role, roles_ref[0])

    self.assignment_api.delete_grant(group_id=group1['id'],
                                     domain_id=domain2['id'],
                                     role_id=group1_domain2_role['id'])
    roles_ref = self.assignment_api.list_grants(
        group_id=group1['id'],
        domain_id=domain2['id'])
    self.assertEqual(0, len(roles_ref))
    # Deleting the removed grant again must raise.
    self.assertRaises(exception.RoleAssignmentNotFound,
                      self.assignment_api.delete_grant,
                      group_id=group1['id'],
                      domain_id=domain2['id'],
                      role_id=group1_domain2_role['id'])
+
def test_get_and_remove_role_grant_by_user_and_cross_domain(self):
    """A user can hold distinct roles in two different domains."""
    user1_domain1_role = unit.new_role_ref()
    self.role_api.create_role(user1_domain1_role['id'], user1_domain1_role)
    user1_domain2_role = unit.new_role_ref()
    self.role_api.create_role(user1_domain2_role['id'], user1_domain2_role)
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    domain2 = unit.new_domain_ref()
    self.resource_api.create_domain(domain2['id'], domain2)
    user1 = unit.new_user_ref(domain_id=domain1['id'])
    user1 = self.identity_api.create_user(user1)
    # No grants exist yet in either domain.
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        domain_id=domain1['id'])
    self.assertEqual(0, len(roles_ref))
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        domain_id=domain2['id'])
    self.assertEqual(0, len(roles_ref))
    self.assignment_api.create_grant(user_id=user1['id'],
                                     domain_id=domain1['id'],
                                     role_id=user1_domain1_role['id'])
    self.assignment_api.create_grant(user_id=user1['id'],
                                     domain_id=domain2['id'],
                                     role_id=user1_domain2_role['id'])
    # Each domain reports only its own grant.
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        domain_id=domain1['id'])
    self.assertDictEqual(user1_domain1_role, roles_ref[0])
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        domain_id=domain2['id'])
    self.assertDictEqual(user1_domain2_role, roles_ref[0])

    self.assignment_api.delete_grant(user_id=user1['id'],
                                     domain_id=domain2['id'],
                                     role_id=user1_domain2_role['id'])
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        domain_id=domain2['id'])
    self.assertEqual(0, len(roles_ref))
    # Deleting the removed grant again must raise.
    self.assertRaises(exception.RoleAssignmentNotFound,
                      self.assignment_api.delete_grant,
                      user_id=user1['id'],
                      domain_id=domain2['id'],
                      role_id=user1_domain2_role['id'])
+
def test_role_grant_by_group_and_cross_domain_project(self):
    """A group in one domain can hold roles on a project in another."""
    role1 = unit.new_role_ref()
    self.role_api.create_role(role1['id'], role1)
    role2 = unit.new_role_ref()
    self.role_api.create_role(role2['id'], role2)
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    domain2 = unit.new_domain_ref()
    self.resource_api.create_domain(domain2['id'], domain2)
    group1 = unit.new_group_ref(domain_id=domain1['id'])
    group1 = self.identity_api.create_group(group1)
    project1 = unit.new_project_ref(domain_id=domain2['id'])
    self.resource_api.create_project(project1['id'], project1)
    grants = self.assignment_api.list_grants(
        group_id=group1['id'],
        project_id=project1['id'])
    self.assertEqual(0, len(grants))
    # Grant both roles to the group on the cross-domain project.
    for role in (role1, role2):
        self.assignment_api.create_grant(group_id=group1['id'],
                                         project_id=project1['id'],
                                         role_id=role['id'])
    grants = self.assignment_api.list_grants(
        group_id=group1['id'],
        project_id=project1['id'])
    grant_ids = [grant['id'] for grant in grants]
    self.assertIn(role1['id'], grant_ids)
    self.assertIn(role2['id'], grant_ids)

    # Removing one grant leaves exactly the other one behind.
    self.assignment_api.delete_grant(group_id=group1['id'],
                                     project_id=project1['id'],
                                     role_id=role1['id'])
    grants = self.assignment_api.list_grants(
        group_id=group1['id'],
        project_id=project1['id'])
    self.assertEqual(1, len(grants))
    self.assertDictEqual(role2, grants[0])
+
def test_role_grant_by_user_and_cross_domain_project(self):
    """A user in one domain can hold roles on a project in another."""
    role1 = unit.new_role_ref()
    self.role_api.create_role(role1['id'], role1)
    role2 = unit.new_role_ref()
    self.role_api.create_role(role2['id'], role2)
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    domain2 = unit.new_domain_ref()
    self.resource_api.create_domain(domain2['id'], domain2)
    user1 = unit.new_user_ref(domain_id=domain1['id'])
    user1 = self.identity_api.create_user(user1)
    project1 = unit.new_project_ref(domain_id=domain2['id'])
    self.resource_api.create_project(project1['id'], project1)
    grants = self.assignment_api.list_grants(
        user_id=user1['id'],
        project_id=project1['id'])
    self.assertEqual(0, len(grants))
    # Grant both roles to the user on the cross-domain project.
    for role in (role1, role2):
        self.assignment_api.create_grant(user_id=user1['id'],
                                         project_id=project1['id'],
                                         role_id=role['id'])
    grants = self.assignment_api.list_grants(
        user_id=user1['id'],
        project_id=project1['id'])
    grant_ids = [grant['id'] for grant in grants]
    self.assertIn(role1['id'], grant_ids)
    self.assertIn(role2['id'], grant_ids)

    # Removing one grant leaves exactly the other one behind.
    self.assignment_api.delete_grant(user_id=user1['id'],
                                     project_id=project1['id'],
                                     role_id=role1['id'])
    grants = self.assignment_api.list_grants(
        user_id=user1['id'],
        project_id=project1['id'])
    self.assertEqual(1, len(grants))
    self.assertDictEqual(role2, grants[0])
+
def test_delete_user_grant_no_user(self):
    # Can delete a grant where the user doesn't exist.
    new_role = unit.new_role_ref()
    new_role_id = new_role['id']
    self.role_api.create_role(new_role_id, new_role)

    missing_user_id = uuid.uuid4().hex

    self.assignment_api.create_grant(new_role_id,
                                     user_id=missing_user_id,
                                     project_id=self.tenant_bar['id'])
    self.assignment_api.delete_grant(new_role_id,
                                     user_id=missing_user_id,
                                     project_id=self.tenant_bar['id'])
+
def test_delete_group_grant_no_group(self):
    # Can delete a grant where the group doesn't exist.
    new_role = unit.new_role_ref()
    new_role_id = new_role['id']
    self.role_api.create_role(new_role_id, new_role)

    missing_group_id = uuid.uuid4().hex

    self.assignment_api.create_grant(new_role_id,
                                     group_id=missing_group_id,
                                     project_id=self.tenant_bar['id'])
    self.assignment_api.delete_grant(new_role_id,
                                     group_id=missing_group_id,
                                     project_id=self.tenant_bar['id'])
+
def test_grant_crud_throws_exception_if_invalid_role(self):
    """Ensure RoleNotFound thrown if role does not exist."""
    def assert_role_not_found_exception(f, **kwargs):
        # Call f with a random (nonexistent) role id plus the
        # supplied actor/target kwargs and expect RoleNotFound.
        self.assertRaises(exception.RoleNotFound, f,
                          role_id=uuid.uuid4().hex, **kwargs)

    user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
    user_resp = self.identity_api.create_user(user)
    group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
    group_resp = self.identity_api.create_group(group)
    project = unit.new_project_ref(
        domain_id=CONF.identity.default_domain_id)
    project_resp = self.resource_api.create_project(project['id'], project)

    # Every grant CRUD entry point must reject an unknown role id for
    # each user/group x project/domain combination.
    for manager_call in [self.assignment_api.create_grant,
                         self.assignment_api.get_grant,
                         self.assignment_api.delete_grant]:
        assert_role_not_found_exception(
            manager_call,
            user_id=user_resp['id'], project_id=project_resp['id'])
        assert_role_not_found_exception(
            manager_call,
            group_id=group_resp['id'], project_id=project_resp['id'])
        assert_role_not_found_exception(
            manager_call,
            user_id=user_resp['id'],
            domain_id=CONF.identity.default_domain_id)
        assert_role_not_found_exception(
            manager_call,
            group_id=group_resp['id'],
            domain_id=CONF.identity.default_domain_id)
+
def test_multi_role_grant_by_user_group_on_project_domain(self):
    """Several user and group grants coexist on a project and a domain.

    Grants roles 0-1 (user/domain), 2-3 (group/domain), 4-5
    (user/project) and 6-7 (group/project), then checks both the
    per-grant listing and the combined effective-role views.
    """
    role_list = []
    for _ in range(10):
        role = unit.new_role_ref()
        self.role_api.create_role(role['id'], role)
        role_list.append(role)
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    user1 = unit.new_user_ref(domain_id=domain1['id'])
    user1 = self.identity_api.create_user(user1)
    group1 = unit.new_group_ref(domain_id=domain1['id'])
    group1 = self.identity_api.create_group(group1)
    group2 = unit.new_group_ref(domain_id=domain1['id'])
    group2 = self.identity_api.create_group(group2)
    project1 = unit.new_project_ref(domain_id=domain1['id'])
    self.resource_api.create_project(project1['id'], project1)

    self.identity_api.add_user_to_group(user1['id'],
                                        group1['id'])
    self.identity_api.add_user_to_group(user1['id'],
                                        group2['id'])

    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        project_id=project1['id'])
    self.assertEqual(0, len(roles_ref))
    self.assignment_api.create_grant(user_id=user1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[0]['id'])
    self.assignment_api.create_grant(user_id=user1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[1]['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[2]['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[3]['id'])
    self.assignment_api.create_grant(user_id=user1['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[4]['id'])
    self.assignment_api.create_grant(user_id=user1['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[5]['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[6]['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[7]['id'])
    # Each actor/target pair should list exactly its own two grants.
    roles_ref = self.assignment_api.list_grants(user_id=user1['id'],
                                                domain_id=domain1['id'])
    self.assertEqual(2, len(roles_ref))
    self.assertIn(role_list[0], roles_ref)
    self.assertIn(role_list[1], roles_ref)
    roles_ref = self.assignment_api.list_grants(group_id=group1['id'],
                                                domain_id=domain1['id'])
    self.assertEqual(2, len(roles_ref))
    self.assertIn(role_list[2], roles_ref)
    self.assertIn(role_list[3], roles_ref)
    roles_ref = self.assignment_api.list_grants(user_id=user1['id'],
                                                project_id=project1['id'])
    self.assertEqual(2, len(roles_ref))
    self.assertIn(role_list[4], roles_ref)
    self.assertIn(role_list[5], roles_ref)
    roles_ref = self.assignment_api.list_grants(group_id=group1['id'],
                                                project_id=project1['id'])
    self.assertEqual(2, len(roles_ref))
    self.assertIn(role_list[6], roles_ref)
    self.assertIn(role_list[7], roles_ref)

    # Now test the alternate way of getting back lists of grants,
    # where user and group roles are combined. These should match
    # the above results.
    combined_list = self.assignment_api.get_roles_for_user_and_project(
        user1['id'], project1['id'])
    self.assertEqual(4, len(combined_list))
    self.assertIn(role_list[4]['id'], combined_list)
    self.assertIn(role_list[5]['id'], combined_list)
    self.assertIn(role_list[6]['id'], combined_list)
    self.assertIn(role_list[7]['id'], combined_list)

    combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
        user1['id'], domain1['id'])
    self.assertEqual(4, len(combined_role_list))
    self.assertIn(role_list[0]['id'], combined_role_list)
    self.assertIn(role_list[1]['id'], combined_role_list)
    self.assertIn(role_list[2]['id'], combined_role_list)
    self.assertIn(role_list[3]['id'], combined_role_list)
+
def test_multi_group_grants_on_project_domain(self):
    """Test multiple group roles for user on project and domain.

    Test Plan:

    - Create 6 roles
    - Create a domain, with a project, user and two groups
    - Make the user a member of both groups
    - Check no roles yet exist
    - Assign a role to the user and both groups on both the
      project and domain
    - Get a list of effective roles for the user on both the
      project and domain, checking we get back the correct three
      roles

    """
    role_list = []
    for _ in range(6):
        role = unit.new_role_ref()
        self.role_api.create_role(role['id'], role)
        role_list.append(role)
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    user1 = unit.new_user_ref(domain_id=domain1['id'])
    user1 = self.identity_api.create_user(user1)
    group1 = unit.new_group_ref(domain_id=domain1['id'])
    group1 = self.identity_api.create_group(group1)
    group2 = unit.new_group_ref(domain_id=domain1['id'])
    group2 = self.identity_api.create_group(group2)
    project1 = unit.new_project_ref(domain_id=domain1['id'])
    self.resource_api.create_project(project1['id'], project1)

    self.identity_api.add_user_to_group(user1['id'],
                                        group1['id'])
    self.identity_api.add_user_to_group(user1['id'],
                                        group2['id'])

    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        project_id=project1['id'])
    self.assertEqual(0, len(roles_ref))
    # Roles 0-2 on the domain, roles 3-5 on the project.
    self.assignment_api.create_grant(user_id=user1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[0]['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[1]['id'])
    self.assignment_api.create_grant(group_id=group2['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[2]['id'])
    self.assignment_api.create_grant(user_id=user1['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[3]['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[4]['id'])
    self.assignment_api.create_grant(group_id=group2['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[5]['id'])

    # Read by the roles, ensuring we get the correct 3 roles for
    # both project and domain
    combined_list = self.assignment_api.get_roles_for_user_and_project(
        user1['id'], project1['id'])
    self.assertEqual(3, len(combined_list))
    self.assertIn(role_list[3]['id'], combined_list)
    self.assertIn(role_list[4]['id'], combined_list)
    self.assertIn(role_list[5]['id'], combined_list)

    combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
        user1['id'], domain1['id'])
    self.assertEqual(3, len(combined_role_list))
    self.assertIn(role_list[0]['id'], combined_role_list)
    self.assertIn(role_list[1]['id'], combined_role_list)
    self.assertIn(role_list[2]['id'], combined_role_list)
+
def test_delete_role_with_user_and_group_grants(self):
    """Deleting a role removes every user and group grant that used it."""
    role1 = unit.new_role_ref()
    self.role_api.create_role(role1['id'], role1)
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    project1 = unit.new_project_ref(domain_id=domain1['id'])
    self.resource_api.create_project(project1['id'], project1)
    user1 = unit.new_user_ref(domain_id=domain1['id'])
    user1 = self.identity_api.create_user(user1)
    group1 = unit.new_group_ref(domain_id=domain1['id'])
    group1 = self.identity_api.create_group(group1)
    # Grant the same role via all four actor/target combinations.
    self.assignment_api.create_grant(user_id=user1['id'],
                                     project_id=project1['id'],
                                     role_id=role1['id'])
    self.assignment_api.create_grant(user_id=user1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role1['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     project_id=project1['id'],
                                     role_id=role1['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     domain_id=domain1['id'],
                                     role_id=role1['id'])
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        project_id=project1['id'])
    self.assertEqual(1, len(roles_ref))
    roles_ref = self.assignment_api.list_grants(
        group_id=group1['id'],
        project_id=project1['id'])
    self.assertEqual(1, len(roles_ref))
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        domain_id=domain1['id'])
    self.assertEqual(1, len(roles_ref))
    roles_ref = self.assignment_api.list_grants(
        group_id=group1['id'],
        domain_id=domain1['id'])
    self.assertEqual(1, len(roles_ref))
    # Deleting the role must cascade to all four grants.
    self.role_api.delete_role(role1['id'])
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        project_id=project1['id'])
    self.assertEqual(0, len(roles_ref))
    roles_ref = self.assignment_api.list_grants(
        group_id=group1['id'],
        project_id=project1['id'])
    self.assertEqual(0, len(roles_ref))
    roles_ref = self.assignment_api.list_grants(
        user_id=user1['id'],
        domain_id=domain1['id'])
    self.assertEqual(0, len(roles_ref))
    roles_ref = self.assignment_api.list_grants(
        group_id=group1['id'],
        domain_id=domain1['id'])
    self.assertEqual(0, len(roles_ref))
+
def test_list_role_assignment_by_domain(self):
    """Test listing of role assignment filtered by domain."""
    # Declarative plan executed by execute_assignment_plan: create the
    # entities, apply the assignments, then run each listing test.
    test_plan = {
        # A domain with 3 users, 1 group, a spoiler domain and 2 roles.
        'entities': {'domains': [{'users': 3, 'groups': 1}, 1],
                     'roles': 2},
        # Users 1 & 2 are in the group
        'group_memberships': [{'group': 0, 'users': [1, 2]}],
        # Assign a role for user 0 and the group
        'assignments': [{'user': 0, 'role': 0, 'domain': 0},
                        {'group': 0, 'role': 1, 'domain': 0}],
        'tests': [
            # List all effective assignments for domain[0].
            # Should get one direct user role and user roles for each of
            # the users in the group.
            {'params': {'domain': 0, 'effective': True},
             'results': [{'user': 0, 'role': 0, 'domain': 0},
                         {'user': 1, 'role': 1, 'domain': 0,
                          'indirect': {'group': 0}},
                         {'user': 2, 'role': 1, 'domain': 0,
                          'indirect': {'group': 0}}
                         ]},
            # Using domain[1] should return nothing
            {'params': {'domain': 1, 'effective': True},
             'results': []},
        ]
    }
    self.execute_assignment_plan(test_plan)
+
def test_list_role_assignment_by_user_with_domain_group_roles(self):
    """Test listing assignments by user, with group roles on a domain."""
    test_plan = {
        # A domain with 3 users, 3 groups, a spoiler domain
        # plus 3 roles.
        'entities': {'domains': [{'users': 3, 'groups': 3}, 1],
                     'roles': 3},
        # Users 0 & 1 are in group 0, user 0 is also in group 1
        'group_memberships': [{'group': 0, 'users': [0, 1]},
                              {'group': 1, 'users': [0]}],
        'assignments': [{'user': 0, 'role': 0, 'domain': 0},
                        {'group': 0, 'role': 1, 'domain': 0},
                        {'group': 1, 'role': 2, 'domain': 0},
                        # ...and two spoiler assignments
                        {'user': 1, 'role': 1, 'domain': 0},
                        {'group': 2, 'role': 2, 'domain': 0}],
        'tests': [
            # List all effective assignments for user[0].
            # Should get one direct user role and a user roles for each of
            # groups 0 and 1
            {'params': {'user': 0, 'effective': True},
             'results': [{'user': 0, 'role': 0, 'domain': 0},
                         {'user': 0, 'role': 1, 'domain': 0,
                          'indirect': {'group': 0}},
                         {'user': 0, 'role': 2, 'domain': 0,
                          'indirect': {'group': 1}}
                         ]},
            # Adding domain[0] as a filter should return the same data
            {'params': {'user': 0, 'domain': 0, 'effective': True},
             'results': [{'user': 0, 'role': 0, 'domain': 0},
                         {'user': 0, 'role': 1, 'domain': 0,
                          'indirect': {'group': 0}},
                         {'user': 0, 'role': 2, 'domain': 0,
                          'indirect': {'group': 1}}
                         ]},
            # Using domain[1] should return nothing
            {'params': {'user': 0, 'domain': 1, 'effective': True},
             'results': []},
            # Using user[2] should return nothing
            {'params': {'user': 2, 'domain': 0, 'effective': True},
             'results': []},
        ]
    }
    self.execute_assignment_plan(test_plan)
+
def test_list_role_assignment_using_sourced_groups(self):
    """Test listing assignments when restricted by source groups."""
    test_plan = {
        # The default domain with 3 users, 3 groups, 3 projects,
        # plus 3 roles.
        'entities': {'domains': {'id': CONF.identity.default_domain_id,
                                 'users': 3, 'groups': 3, 'projects': 3},
                     'roles': 3},
        # Users 0 & 1 are in group 0, user 0 is also in group 1
        'group_memberships': [{'group': 0, 'users': [0, 1]},
                              {'group': 1, 'users': [0]}],
        # Spread the assignments around - we want to be able to show that
        # if sourced by group, assignments from other sources are excluded
        'assignments': [{'user': 0, 'role': 0, 'project': 0},
                        {'group': 0, 'role': 1, 'project': 1},
                        {'group': 1, 'role': 2, 'project': 0},
                        {'group': 1, 'role': 2, 'project': 1},
                        {'user': 2, 'role': 1, 'project': 1},
                        {'group': 2, 'role': 2, 'project': 2}
                        ],
        'tests': [
            # List all effective assignments sourced from groups 0 and 1
            {'params': {'source_from_group_ids': [0, 1],
                        'effective': True},
             'results': [{'group': 0, 'role': 1, 'project': 1},
                         {'group': 1, 'role': 2, 'project': 0},
                         {'group': 1, 'role': 2, 'project': 1}
                         ]},
            # Adding a role filter should further restrict the entries
            {'params': {'source_from_group_ids': [0, 1], 'role': 2,
                        'effective': True},
             'results': [{'group': 1, 'role': 2, 'project': 0},
                         {'group': 1, 'role': 2, 'project': 1}
                         ]},
        ]
    }
    self.execute_assignment_plan(test_plan)
+
def test_list_role_assignment_using_sourced_groups_with_domains(self):
    """Test listing domain assignments when restricted by source groups."""
    test_plan = {
        # A domain with 3 users, 3 groups, 3 projects, a second domain,
        # plus 3 roles.
        'entities': {'domains': [{'users': 3, 'groups': 3, 'projects': 3},
                                 1],
                     'roles': 3},
        # Users 0 & 1 are in group 0, user 0 is also in group 1
        'group_memberships': [{'group': 0, 'users': [0, 1]},
                              {'group': 1, 'users': [0]}],
        # Spread the assignments around - we want to be able to show that
        # if sourced by group, assignments from other sources are excluded
        'assignments': [{'user': 0, 'role': 0, 'domain': 0},
                        {'group': 0, 'role': 1, 'domain': 1},
                        {'group': 1, 'role': 2, 'project': 0},
                        {'group': 1, 'role': 2, 'project': 1},
                        {'user': 2, 'role': 1, 'project': 1},
                        {'group': 2, 'role': 2, 'project': 2}
                        ],
        'tests': [
            # List all effective assignments sourced from groups 0 and 1
            {'params': {'source_from_group_ids': [0, 1],
                        'effective': True},
             'results': [{'group': 0, 'role': 1, 'domain': 1},
                         {'group': 1, 'role': 2, 'project': 0},
                         {'group': 1, 'role': 2, 'project': 1}
                         ]},
            # Adding a role filter should further restrict the entries
            {'params': {'source_from_group_ids': [0, 1], 'role': 1,
                        'effective': True},
             'results': [{'group': 0, 'role': 1, 'domain': 1},
                         ]},
        ]
    }
    self.execute_assignment_plan(test_plan)
+
def test_list_role_assignment_fails_with_userid_and_source_groups(self):
    """Show we trap this unsupported internal combination of params."""
    group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
    group = self.identity_api.create_group(group)
    # Supplying both user_id and source_from_group_ids is an internal
    # programming error, so it surfaces as UnexpectedError.
    self.assertRaises(exception.UnexpectedError,
                      self.assignment_api.list_role_assignments,
                      effective=True,
                      user_id=self.user_foo['id'],
                      source_from_group_ids=[group['id']])
+
def test_add_user_to_project(self):
    """Adding a user to a project makes it show in their project list."""
    self.assignment_api.add_user_to_project(self.tenant_baz['id'],
                                            self.user_foo['id'])
    tenants = self.assignment_api.list_projects_for_user(
        self.user_foo['id'])
    self.assertIn(self.tenant_baz, tenants)
+
def test_add_user_to_project_missing_default_role(self):
    """add_user_to_project recreates the default member role if deleted."""
    self.role_api.delete_role(CONF.member_role_id)
    # Confirm the default role is really gone before the call.
    self.assertRaises(exception.RoleNotFound,
                      self.role_api.get_role,
                      CONF.member_role_id)
    self.assignment_api.add_user_to_project(self.tenant_baz['id'],
                                            self.user_foo['id'])
    tenants = (
        self.assignment_api.list_projects_for_user(self.user_foo['id']))
    self.assertIn(self.tenant_baz, tenants)
    # The default role must have been re-created as a side effect.
    default_role = self.role_api.get_role(CONF.member_role_id)
    self.assertIsNotNone(default_role)
+
def test_add_user_to_project_returns_not_found(self):
    """Adding a user to a non-existent project raises ProjectNotFound."""
    self.assertRaises(exception.ProjectNotFound,
                      self.assignment_api.add_user_to_project,
                      uuid.uuid4().hex,
                      self.user_foo['id'])
+
def test_add_user_to_project_no_user(self):
    # If add_user_to_project and the user doesn't exist, then
    # no error.
    user_id_not_exist = uuid.uuid4().hex
    self.assignment_api.add_user_to_project(self.tenant_bar['id'],
                                            user_id_not_exist)
+
def test_remove_user_from_project(self):
    """Removing a user from a project drops it from their project list."""
    self.assignment_api.add_user_to_project(self.tenant_baz['id'],
                                            self.user_foo['id'])
    self.assignment_api.remove_user_from_project(self.tenant_baz['id'],
                                                 self.user_foo['id'])
    tenants = self.assignment_api.list_projects_for_user(
        self.user_foo['id'])
    self.assertNotIn(self.tenant_baz, tenants)
+
def test_remove_user_from_project_race_delete_role(self):
    """Removal still succeeds if a role is deleted mid-operation."""
    self.assignment_api.add_user_to_project(self.tenant_baz['id'],
                                            self.user_foo['id'])
    self.assignment_api.add_role_to_user_and_project(
        tenant_id=self.tenant_baz['id'],
        user_id=self.user_foo['id'],
        role_id=self.role_other['id'])

    # Mock a race condition, delete a role after
    # get_roles_for_user_and_project() is called in
    # remove_user_from_project().
    roles = self.assignment_api.get_roles_for_user_and_project(
        self.user_foo['id'], self.tenant_baz['id'])
    self.role_api.delete_role(self.role_other['id'])
    # Force remove_user_from_project() to see the stale (pre-delete)
    # role list, reproducing the race window.
    self.assignment_api.get_roles_for_user_and_project = mock.Mock(
        return_value=roles)
    self.assignment_api.remove_user_from_project(self.tenant_baz['id'],
                                                 self.user_foo['id'])
    tenants = self.assignment_api.list_projects_for_user(
        self.user_foo['id'])
    self.assertNotIn(self.tenant_baz, tenants)
+
def test_remove_user_from_project_returns_not_found(self):
    """Removal raises the appropriate NotFound for each missing entity."""
    # Non-existent project.
    self.assertRaises(exception.ProjectNotFound,
                      self.assignment_api.remove_user_from_project,
                      uuid.uuid4().hex,
                      self.user_foo['id'])

    # Non-existent user.
    self.assertRaises(exception.UserNotFound,
                      self.assignment_api.remove_user_from_project,
                      self.tenant_bar['id'],
                      uuid.uuid4().hex)

    # Valid user and project, but no membership between them.
    self.assertRaises(exception.NotFound,
                      self.assignment_api.remove_user_from_project,
                      self.tenant_baz['id'],
                      self.user_foo['id'])
+
def test_list_user_project_ids_returns_not_found(self):
    """Listing projects for a non-existent user raises UserNotFound."""
    self.assertRaises(exception.UserNotFound,
                      self.assignment_api.list_projects_for_user,
                      uuid.uuid4().hex)
+
def test_delete_user_with_project_association(self):
    """Deleting a user who is a project member removes the user cleanly."""
    user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
    user = self.identity_api.create_user(user)
    self.assignment_api.add_user_to_project(self.tenant_bar['id'],
                                            user['id'])
    self.identity_api.delete_user(user['id'])
    self.assertRaises(exception.UserNotFound,
                      self.assignment_api.list_projects_for_user,
                      user['id'])
+
def test_delete_user_with_project_roles(self):
    """Deleting a user with project role grants removes the user cleanly."""
    user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
    user = self.identity_api.create_user(user)
    self.assignment_api.add_role_to_user_and_project(
        user['id'],
        self.tenant_bar['id'],
        self.role_member['id'])
    self.identity_api.delete_user(user['id'])
    self.assertRaises(exception.UserNotFound,
                      self.assignment_api.list_projects_for_user,
                      user['id'])
+
def test_delete_role_returns_not_found(self):
    """Deleting a non-existent role raises RoleNotFound."""
    self.assertRaises(exception.RoleNotFound,
                      self.role_api.delete_role,
                      uuid.uuid4().hex)
+
def test_delete_project_with_role_assignments(self):
    """Deleting a project with assignments removes the project cleanly."""
    project = unit.new_project_ref(
        domain_id=CONF.identity.default_domain_id)
    self.resource_api.create_project(project['id'], project)
    self.assignment_api.add_role_to_user_and_project(
        self.user_foo['id'], project['id'], 'member')
    self.resource_api.delete_project(project['id'])
    self.assertRaises(exception.ProjectNotFound,
                      self.assignment_api.list_user_ids_for_project,
                      project['id'])
+
def test_delete_role_check_role_grant(self):
    """Deleting a role removes only that role's grants, not others'."""
    role = unit.new_role_ref()
    alt_role = unit.new_role_ref()
    self.role_api.create_role(role['id'], role)
    self.role_api.create_role(alt_role['id'], alt_role)
    self.assignment_api.add_role_to_user_and_project(
        self.user_foo['id'], self.tenant_bar['id'], role['id'])
    self.assignment_api.add_role_to_user_and_project(
        self.user_foo['id'], self.tenant_bar['id'], alt_role['id'])
    self.role_api.delete_role(role['id'])
    roles_ref = self.assignment_api.get_roles_for_user_and_project(
        self.user_foo['id'], self.tenant_bar['id'])
    # The deleted role's grant is gone; the other grant survives.
    self.assertNotIn(role['id'], roles_ref)
    self.assertIn(alt_role['id'], roles_ref)
+
def test_list_projects_for_user(self):
    """A user's project list grows with each direct project grant."""
    domain = unit.new_domain_ref()
    self.resource_api.create_domain(domain['id'], domain)
    user1 = unit.new_user_ref(domain_id=domain['id'])
    user1 = self.identity_api.create_user(user1)
    # A brand-new user has no projects.
    user_projects = self.assignment_api.list_projects_for_user(user1['id'])
    self.assertEqual(0, len(user_projects))
    self.assignment_api.create_grant(user_id=user1['id'],
                                     project_id=self.tenant_bar['id'],
                                     role_id=self.role_member['id'])
    self.assignment_api.create_grant(user_id=user1['id'],
                                     project_id=self.tenant_baz['id'],
                                     role_id=self.role_member['id'])
    user_projects = self.assignment_api.list_projects_for_user(user1['id'])
    self.assertEqual(2, len(user_projects))
+
def test_list_projects_for_user_with_grants(self):
    """Group-sourced grants are included in a user's project list."""
    # Create two groups each with a role on a different project, and
    # make user1 a member of both groups. Both these new projects
    # should now be included, along with any direct user grants.
    domain = unit.new_domain_ref()
    self.resource_api.create_domain(domain['id'], domain)
    user1 = unit.new_user_ref(domain_id=domain['id'])
    user1 = self.identity_api.create_user(user1)
    group1 = unit.new_group_ref(domain_id=domain['id'])
    group1 = self.identity_api.create_group(group1)
    group2 = unit.new_group_ref(domain_id=domain['id'])
    group2 = self.identity_api.create_group(group2)
    project1 = unit.new_project_ref(domain_id=domain['id'])
    self.resource_api.create_project(project1['id'], project1)
    project2 = unit.new_project_ref(domain_id=domain['id'])
    self.resource_api.create_project(project2['id'], project2)
    self.identity_api.add_user_to_group(user1['id'], group1['id'])
    self.identity_api.add_user_to_group(user1['id'], group2['id'])

    # Create 3 grants, one user grant, the other two as group grants
    self.assignment_api.create_grant(user_id=user1['id'],
                                     project_id=self.tenant_bar['id'],
                                     role_id=self.role_member['id'])
    self.assignment_api.create_grant(group_id=group1['id'],
                                     project_id=project1['id'],
                                     role_id=self.role_admin['id'])
    self.assignment_api.create_grant(group_id=group2['id'],
                                     project_id=project2['id'],
                                     role_id=self.role_admin['id'])
    user_projects = self.assignment_api.list_projects_for_user(user1['id'])
    self.assertEqual(3, len(user_projects))
+
def test_create_grant_no_user(self):
    # If call create_grant with a user that doesn't exist, doesn't fail.
    self.assignment_api.create_grant(
        self.role_other['id'],
        user_id=uuid.uuid4().hex,
        project_id=self.tenant_bar['id'])
+
def test_create_grant_no_group(self):
    # If call create_grant with a group that doesn't exist, doesn't fail.
    self.assignment_api.create_grant(
        self.role_other['id'],
        group_id=uuid.uuid4().hex,
        project_id=self.tenant_bar['id'])
+
def test_delete_group_removes_role_assignments(self):
    """Deleting a group also deletes the group's role assignments."""
    # When a group is deleted any role assignments for the group are
    # removed.

    MEMBER_ROLE_ID = 'member'

    def get_member_assignments():
        # Helper: all current assignments that use the member role.
        assignments = self.assignment_api.list_role_assignments()
        return [x for x in assignments if x['role_id'] == MEMBER_ROLE_ID]

    orig_member_assignments = get_member_assignments()

    # Create a group.
    new_group = unit.new_group_ref(
        domain_id=CONF.identity.default_domain_id)
    new_group = self.identity_api.create_group(new_group)

    # Create a project.
    new_project = unit.new_project_ref(
        domain_id=CONF.identity.default_domain_id)
    self.resource_api.create_project(new_project['id'], new_project)

    # Assign a role to the group.
    self.assignment_api.create_grant(
        group_id=new_group['id'], project_id=new_project['id'],
        role_id=MEMBER_ROLE_ID)

    # Delete the group.
    self.identity_api.delete_group(new_group['id'])

    # Check that the role assignment for the group is gone
    member_assignments = get_member_assignments()

    self.assertThat(member_assignments,
                    matchers.Equals(orig_member_assignments))
+
def test_get_roles_for_groups_on_domain(self):
    """Test retrieving group domain roles.

    Test Plan:

    - Create a domain, three groups and three roles
    - Assign one an inherited and the others a non-inherited group role
      to the domain
    - Ensure that only the non-inherited roles are returned on the domain

    """
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    group_list = []
    group_id_list = []
    role_list = []
    for _ in range(3):
        group = unit.new_group_ref(domain_id=domain1['id'])
        group = self.identity_api.create_group(group)
        group_list.append(group)
        group_id_list.append(group['id'])

        role = unit.new_role_ref()
        self.role_api.create_role(role['id'], role)
        role_list.append(role)

    # Assign the roles - one is inherited
    self.assignment_api.create_grant(group_id=group_list[0]['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[0]['id'])
    self.assignment_api.create_grant(group_id=group_list[1]['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[1]['id'])
    self.assignment_api.create_grant(group_id=group_list[2]['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[2]['id'],
                                     inherited_to_projects=True)

    # Now get the effective roles for the groups on the domain project. We
    # shouldn't get back the inherited role.

    role_refs = self.assignment_api.get_roles_for_groups(
        group_id_list, domain_id=domain1['id'])

    self.assertThat(role_refs, matchers.HasLength(2))
    self.assertIn(role_list[0], role_refs)
    self.assertIn(role_list[1], role_refs)
+
def test_get_roles_for_groups_on_project(self):
    """Test retrieving group project roles.

    Test Plan:

    - Create two domains, two projects, six groups and six roles
    - Project1 is in Domain1, Project2 is in Domain2
    - Domain2/Project2 are spoilers
    - Assign a different direct group role to each project as well
      as both an inherited and non-inherited role to each domain
    - Get the group roles for Project 1 - depending on whether we have
      enabled inheritance, we should either get back just the direct role
      or both the direct one plus the inherited domain role from Domain 1

    """
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    domain2 = unit.new_domain_ref()
    self.resource_api.create_domain(domain2['id'], domain2)
    project1 = unit.new_project_ref(domain_id=domain1['id'])
    self.resource_api.create_project(project1['id'], project1)
    project2 = unit.new_project_ref(domain_id=domain2['id'])
    self.resource_api.create_project(project2['id'], project2)
    group_list = []
    group_id_list = []
    role_list = []
    for _ in range(6):
        group = unit.new_group_ref(domain_id=domain1['id'])
        group = self.identity_api.create_group(group)
        group_list.append(group)
        group_id_list.append(group['id'])

        role = unit.new_role_ref()
        self.role_api.create_role(role['id'], role)
        role_list.append(role)

    # Assign the roles - one inherited and one non-inherited on Domain1,
    # plus one on Project1
    self.assignment_api.create_grant(group_id=group_list[0]['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[0]['id'])
    self.assignment_api.create_grant(group_id=group_list[1]['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[1]['id'],
                                     inherited_to_projects=True)
    self.assignment_api.create_grant(group_id=group_list[2]['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[2]['id'])

    # ...and a duplicate set of spoiler assignments to Domain2/Project2
    self.assignment_api.create_grant(group_id=group_list[3]['id'],
                                     domain_id=domain2['id'],
                                     role_id=role_list[3]['id'])
    self.assignment_api.create_grant(group_id=group_list[4]['id'],
                                     domain_id=domain2['id'],
                                     role_id=role_list[4]['id'],
                                     inherited_to_projects=True)
    self.assignment_api.create_grant(group_id=group_list[5]['id'],
                                     project_id=project2['id'],
                                     role_id=role_list[5]['id'])

    # Now get the effective roles for all groups on the Project1. With
    # inheritance off, we should only get back the direct role.

    self.config_fixture.config(group='os_inherit', enabled=False)
    role_refs = self.assignment_api.get_roles_for_groups(
        group_id_list, project_id=project1['id'])

    self.assertThat(role_refs, matchers.HasLength(1))
    self.assertIn(role_list[2], role_refs)

    # With inheritance on, we should also get back the inherited role from
    # its owning domain.

    self.config_fixture.config(group='os_inherit', enabled=True)
    role_refs = self.assignment_api.get_roles_for_groups(
        group_id_list, project_id=project1['id'])

    self.assertThat(role_refs, matchers.HasLength(2))
    self.assertIn(role_list[1], role_refs)
    self.assertIn(role_list[2], role_refs)
+
def test_list_domains_for_groups(self):
    """Test retrieving domains for a list of groups.

    Test Plan:

    - Create three domains, three groups and one role
    - Assign a non-inherited group role to two domains, and an inherited
      group role to the third
    - Ensure only the domains with non-inherited roles are returned

    """
    domain_list = []
    group_list = []
    group_id_list = []
    for _ in range(3):
        domain = unit.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        domain_list.append(domain)

        group = unit.new_group_ref(domain_id=domain['id'])
        group = self.identity_api.create_group(group)
        group_list.append(group)
        group_id_list.append(group['id'])

    role1 = unit.new_role_ref()
    self.role_api.create_role(role1['id'], role1)

    # Assign the roles - one is inherited
    self.assignment_api.create_grant(group_id=group_list[0]['id'],
                                     domain_id=domain_list[0]['id'],
                                     role_id=role1['id'])
    self.assignment_api.create_grant(group_id=group_list[1]['id'],
                                     domain_id=domain_list[1]['id'],
                                     role_id=role1['id'])
    self.assignment_api.create_grant(group_id=group_list[2]['id'],
                                     domain_id=domain_list[2]['id'],
                                     role_id=role1['id'],
                                     inherited_to_projects=True)

    # Now list the domains that have roles for any of the 3 groups
    # We shouldn't get back domain[2] since that had an inherited role.

    domain_refs = (
        self.assignment_api.list_domains_for_groups(group_id_list))

    self.assertThat(domain_refs, matchers.HasLength(2))
    self.assertIn(domain_list[0], domain_refs)
    self.assertIn(domain_list[1], domain_refs)
+
def test_list_projects_for_groups(self):
    """Test retrieving projects for a list of groups.

    Test Plan:

    - Create two domains, four projects, seven groups and seven roles
    - Project1-3 are in Domain1, Project4 is in Domain2
    - Domain2/Project4 are spoilers
    - Project1 and 2 have direct group roles, Project3 has no direct
      roles but should inherit a group role from Domain1
    - Get the projects for the group roles that are assigned to Project1
      Project2 and the inherited one on Domain1. Depending on whether we
      have enabled inheritance, we should either get back just the projects
      with direct roles (Project 1 and 2) or also Project3 due to its
      inherited role from Domain1.

    """
    domain1 = unit.new_domain_ref()
    self.resource_api.create_domain(domain1['id'], domain1)
    domain2 = unit.new_domain_ref()
    self.resource_api.create_domain(domain2['id'], domain2)
    project1 = unit.new_project_ref(domain_id=domain1['id'])
    project1 = self.resource_api.create_project(project1['id'], project1)
    project2 = unit.new_project_ref(domain_id=domain1['id'])
    project2 = self.resource_api.create_project(project2['id'], project2)
    project3 = unit.new_project_ref(domain_id=domain1['id'])
    project3 = self.resource_api.create_project(project3['id'], project3)
    project4 = unit.new_project_ref(domain_id=domain2['id'])
    project4 = self.resource_api.create_project(project4['id'], project4)
    group_list = []
    role_list = []
    for _ in range(7):
        group = unit.new_group_ref(domain_id=domain1['id'])
        group = self.identity_api.create_group(group)
        group_list.append(group)

        role = unit.new_role_ref()
        self.role_api.create_role(role['id'], role)
        role_list.append(role)

    # Assign the roles - one inherited and one non-inherited on Domain1,
    # plus one on Project1 and Project2
    self.assignment_api.create_grant(group_id=group_list[0]['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[0]['id'])
    self.assignment_api.create_grant(group_id=group_list[1]['id'],
                                     domain_id=domain1['id'],
                                     role_id=role_list[1]['id'],
                                     inherited_to_projects=True)
    self.assignment_api.create_grant(group_id=group_list[2]['id'],
                                     project_id=project1['id'],
                                     role_id=role_list[2]['id'])
    self.assignment_api.create_grant(group_id=group_list[3]['id'],
                                     project_id=project2['id'],
                                     role_id=role_list[3]['id'])

    # ...and a few of spoiler assignments to Domain2/Project4
    self.assignment_api.create_grant(group_id=group_list[4]['id'],
                                     domain_id=domain2['id'],
                                     role_id=role_list[4]['id'])
    self.assignment_api.create_grant(group_id=group_list[5]['id'],
                                     domain_id=domain2['id'],
                                     role_id=role_list[5]['id'],
                                     inherited_to_projects=True)
    self.assignment_api.create_grant(group_id=group_list[6]['id'],
                                     project_id=project4['id'],
                                     role_id=role_list[6]['id'])

    # Now get the projects for the groups that have roles on Project1,
    # Project2 and the inherited role on Domain1. With inheritance off,
    # we should only get back the projects with direct role.

    self.config_fixture.config(group='os_inherit', enabled=False)
    group_id_list = [group_list[1]['id'], group_list[2]['id'],
                     group_list[3]['id']]
    project_refs = (
        self.assignment_api.list_projects_for_groups(group_id_list))

    self.assertThat(project_refs, matchers.HasLength(2))
    self.assertIn(project1, project_refs)
    self.assertIn(project2, project_refs)

    # With inheritance on, we should also get back the Project3 due to the
    # inherited role from its owning domain.

    self.config_fixture.config(group='os_inherit', enabled=True)
    project_refs = (
        self.assignment_api.list_projects_for_groups(group_id_list))

    self.assertThat(project_refs, matchers.HasLength(3))
    self.assertIn(project1, project_refs)
    self.assertIn(project2, project_refs)
    self.assertIn(project3, project_refs)
+
def test_update_role_no_name(self):
    # A user can update a role and not include the name.

    # description is picked just because it's not name.
    self.role_api.update_role(self.role_member['id'],
                              {'description': uuid.uuid4().hex})
    # If the previous line didn't raise an exception then the test passes.
+
def test_update_role_same_name(self):
    # A user can update a role and set the name to be the same as it was.

    self.role_api.update_role(self.role_member['id'],
                              {'name': self.role_member['name']})
    # If the previous line didn't raise an exception then the test passes.
+
def test_list_role_assignment_containing_names(self):
    """include_names=True adds entity names to each listed assignment."""
    # Create Refs
    new_role = unit.new_role_ref()
    new_domain = self._get_domain_fixture()
    new_user = unit.new_user_ref(domain_id=new_domain['id'])
    new_project = unit.new_project_ref(domain_id=new_domain['id'])
    new_group = unit.new_group_ref(domain_id=new_domain['id'])
    # Create entities
    new_role = self.role_api.create_role(new_role['id'], new_role)
    new_user = self.identity_api.create_user(new_user)
    new_group = self.identity_api.create_group(new_group)
    self.resource_api.create_project(new_project['id'], new_project)
    # One assignment of each flavor: user/project, group/project,
    # user/domain.
    self.assignment_api.create_grant(user_id=new_user['id'],
                                     project_id=new_project['id'],
                                     role_id=new_role['id'])
    self.assignment_api.create_grant(group_id=new_group['id'],
                                     project_id=new_project['id'],
                                     role_id=new_role['id'])
    self.assignment_api.create_grant(domain_id=new_domain['id'],
                                     user_id=new_user['id'],
                                     role_id=new_role['id'])
    # Get the created assignments with the include_names flag
    _asgmt_prj = self.assignment_api.list_role_assignments(
        user_id=new_user['id'],
        project_id=new_project['id'],
        include_names=True)
    _asgmt_grp = self.assignment_api.list_role_assignments(
        group_id=new_group['id'],
        project_id=new_project['id'],
        include_names=True)
    _asgmt_dmn = self.assignment_api.list_role_assignments(
        domain_id=new_domain['id'],
        user_id=new_user['id'],
        include_names=True)
    # Make sure we can get back the correct number of assignments
    self.assertThat(_asgmt_prj, matchers.HasLength(1))
    self.assertThat(_asgmt_grp, matchers.HasLength(1))
    self.assertThat(_asgmt_dmn, matchers.HasLength(1))
    # get the first assignment
    first_asgmt_prj = _asgmt_prj[0]
    first_asgmt_grp = _asgmt_grp[0]
    first_asgmt_dmn = _asgmt_dmn[0]
    # Assert the names are correct in the project response
    self.assertEqual(new_project['name'],
                     first_asgmt_prj['project_name'])
    self.assertEqual(new_project['domain_id'],
                     first_asgmt_prj['project_domain_id'])
    self.assertEqual(new_user['name'],
                     first_asgmt_prj['user_name'])
    self.assertEqual(new_user['domain_id'],
                     first_asgmt_prj['user_domain_id'])
    self.assertEqual(new_role['name'],
                     first_asgmt_prj['role_name'])
    # Assert the names are correct in the group response
    self.assertEqual(new_group['name'],
                     first_asgmt_grp['group_name'])
    self.assertEqual(new_group['domain_id'],
                     first_asgmt_grp['group_domain_id'])
    self.assertEqual(new_project['name'],
                     first_asgmt_grp['project_name'])
    self.assertEqual(new_project['domain_id'],
                     first_asgmt_grp['project_domain_id'])
    self.assertEqual(new_role['name'],
                     first_asgmt_grp['role_name'])
    # Assert the names are correct in the domain response
    self.assertEqual(new_domain['name'],
                     first_asgmt_dmn['domain_name'])
    self.assertEqual(new_user['name'],
                     first_asgmt_dmn['user_name'])
    self.assertEqual(new_user['domain_id'],
                     first_asgmt_dmn['user_domain_id'])
    self.assertEqual(new_role['name'],
                     first_asgmt_dmn['role_name'])
+
def test_list_role_assignment_does_not_contain_names(self):
    """Test names are not included with list role assignments.

    Scenario:
        - names are NOT included by default
        - names are NOT included when include_names=False

    """
    def assert_does_not_contain_names(assignment):
        # Helper: the first assignment must carry no *_name / *_domain_id
        # enrichment keys.
        first_asgmt_prj = assignment[0]
        self.assertNotIn('project_name', first_asgmt_prj)
        self.assertNotIn('project_domain_id', first_asgmt_prj)
        self.assertNotIn('user_name', first_asgmt_prj)
        self.assertNotIn('user_domain_id', first_asgmt_prj)
        self.assertNotIn('role_name', first_asgmt_prj)

    # Create Refs
    new_role = unit.new_role_ref()
    new_domain = self._get_domain_fixture()
    new_user = unit.new_user_ref(domain_id=new_domain['id'])
    new_project = unit.new_project_ref(domain_id=new_domain['id'])
    # Create entities
    new_role = self.role_api.create_role(new_role['id'], new_role)
    new_user = self.identity_api.create_user(new_user)
    self.resource_api.create_project(new_project['id'], new_project)
    self.assignment_api.create_grant(user_id=new_user['id'],
                                     project_id=new_project['id'],
                                     role_id=new_role['id'])
    # Get the created assignments with NO include_names flag
    role_assign_without_names = self.assignment_api.list_role_assignments(
        user_id=new_user['id'],
        project_id=new_project['id'])
    assert_does_not_contain_names(role_assign_without_names)
    # Get the created assignments with include_names=False
    role_assign_without_names = self.assignment_api.list_role_assignments(
        user_id=new_user['id'],
        project_id=new_project['id'],
        include_names=False)
    assert_does_not_contain_names(role_assign_without_names)
+
def test_delete_user_assignments_user_same_id_as_group(self):
    """Test deleting user assignments when user_id == group_id.

    In this scenario, only user assignments must be deleted (i.e.
    USER_DOMAIN or USER_PROJECT).

    Test plan:
    * Create a user and a group with the same ID;
    * Create four roles and assign them to both user and group;
    * Delete all user assignments;
    * Group assignments must stay intact.
    """
    # Create a common ID
    common_id = uuid.uuid4().hex
    # Create a project
    project = unit.new_project_ref(
        domain_id=CONF.identity.default_domain_id)
    project = self.resource_api.create_project(project['id'], project)
    # Create a user (driver is called directly so the ID can be forced).
    user = unit.new_user_ref(id=common_id,
                             domain_id=CONF.identity.default_domain_id)
    user = self.identity_api.driver.create_user(common_id, user)
    self.assertEqual(common_id, user['id'])
    # Create a group with the identical ID.
    group = unit.new_group_ref(id=common_id,
                               domain_id=CONF.identity.default_domain_id)
    group = self.identity_api.driver.create_group(common_id, group)
    self.assertEqual(common_id, group['id'])
    # Create four roles
    roles = []
    for _ in range(4):
        role = unit.new_role_ref()
        roles.append(self.role_api.create_role(role['id'], role))
    # Assign roles for user
    self.assignment_api.driver.create_grant(
        user_id=user['id'], domain_id=CONF.identity.default_domain_id,
        role_id=roles[0]['id'])
    self.assignment_api.driver.create_grant(user_id=user['id'],
                                            project_id=project['id'],
                                            role_id=roles[1]['id'])
    # Assign roles for group
    self.assignment_api.driver.create_grant(
        group_id=group['id'], domain_id=CONF.identity.default_domain_id,
        role_id=roles[2]['id'])
    self.assignment_api.driver.create_grant(group_id=group['id'],
                                            project_id=project['id'],
                                            role_id=roles[3]['id'])
    # Make sure they were assigned
    user_assignments = self.assignment_api.list_role_assignments(
        user_id=user['id'])
    self.assertThat(user_assignments, matchers.HasLength(2))
    group_assignments = self.assignment_api.list_role_assignments(
        group_id=group['id'])
    self.assertThat(group_assignments, matchers.HasLength(2))
    # Delete user assignments
    self.assignment_api.delete_user_assignments(user_id=user['id'])
    # Assert only user assignments were deleted
    user_assignments = self.assignment_api.list_role_assignments(
        user_id=user['id'])
    self.assertThat(user_assignments, matchers.HasLength(0))
    group_assignments = self.assignment_api.list_role_assignments(
        group_id=group['id'])
    self.assertThat(group_assignments, matchers.HasLength(2))
    # Make sure these remaining assignments are group-related
    for assignment in group_assignments:
        self.assertThat(assignment.keys(), matchers.Contains('group_id'))
+
def test_delete_group_assignments_group_same_id_as_user(self):
    """Test deleting group assignments when group_id == user_id.

    In this scenario, only group assignments must be deleted (i.e.
    GROUP_DOMAIN or GROUP_PROJECT).

    Test plan:
    * Create a group and a user with the same ID;
    * Create four roles and assign them to both group and user;
    * Delete all group assignments;
    * User assignments must stay intact.
    """
    # Create a common ID
    common_id = uuid.uuid4().hex
    # Create a project
    project = unit.new_project_ref(
        domain_id=CONF.identity.default_domain_id)
    project = self.resource_api.create_project(project['id'], project)
    # Create a user (driver is called directly so the ID can be forced).
    user = unit.new_user_ref(id=common_id,
                             domain_id=CONF.identity.default_domain_id)
    user = self.identity_api.driver.create_user(common_id, user)
    self.assertEqual(common_id, user['id'])
    # Create a group with the identical ID.
    group = unit.new_group_ref(id=common_id,
                               domain_id=CONF.identity.default_domain_id)
    group = self.identity_api.driver.create_group(common_id, group)
    self.assertEqual(common_id, group['id'])
    # Create four roles
    roles = []
    for _ in range(4):
        role = unit.new_role_ref()
        roles.append(self.role_api.create_role(role['id'], role))
    # Assign roles for user
    self.assignment_api.driver.create_grant(
        user_id=user['id'], domain_id=CONF.identity.default_domain_id,
        role_id=roles[0]['id'])
    self.assignment_api.driver.create_grant(user_id=user['id'],
                                            project_id=project['id'],
                                            role_id=roles[1]['id'])
    # Assign roles for group
    self.assignment_api.driver.create_grant(
        group_id=group['id'], domain_id=CONF.identity.default_domain_id,
        role_id=roles[2]['id'])
    self.assignment_api.driver.create_grant(group_id=group['id'],
                                            project_id=project['id'],
                                            role_id=roles[3]['id'])
    # Make sure they were assigned
    user_assignments = self.assignment_api.list_role_assignments(
        user_id=user['id'])
    self.assertThat(user_assignments, matchers.HasLength(2))
    group_assignments = self.assignment_api.list_role_assignments(
        group_id=group['id'])
    self.assertThat(group_assignments, matchers.HasLength(2))
    # Delete group assignments
    self.assignment_api.delete_group_assignments(group_id=group['id'])
    # Assert only group assignments were deleted
    group_assignments = self.assignment_api.list_role_assignments(
        group_id=group['id'])
    self.assertThat(group_assignments, matchers.HasLength(0))
    user_assignments = self.assignment_api.list_role_assignments(
        user_id=user['id'])
    self.assertThat(user_assignments, matchers.HasLength(2))
    # Make sure these remaining assignments are user-related.
    # BUG FIX: the original looped over ``group_assignments``, which was
    # just asserted to be empty, so this check never ran. Iterate over
    # the surviving user assignments instead (mirrors the sibling test
    # test_delete_user_assignments_user_same_id_as_group, which checks
    # its surviving collection).
    for assignment in user_assignments:
        self.assertThat(assignment.keys(), matchers.Contains('user_id'))
+
def test_remove_foreign_assignments_when_deleting_a_domain(self):
    """Deleting a domain removes only that domain's foreign assignments."""
    # A user and a group are in default domain and have assigned a role on
    # two new domains. This test makes sure that when one of the new
    # domains is deleted, the role assignments for the user and the group
    # from the default domain are deleted only on that domain.
    group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
    group = self.identity_api.create_group(group)

    role = unit.new_role_ref()
    role = self.role_api.create_role(role['id'], role)

    new_domains = [unit.new_domain_ref(), unit.new_domain_ref()]
    for new_domain in new_domains:
        self.resource_api.create_domain(new_domain['id'], new_domain)

        self.assignment_api.create_grant(group_id=group['id'],
                                         domain_id=new_domain['id'],
                                         role_id=role['id'])
        self.assignment_api.create_grant(user_id=self.user_two['id'],
                                         domain_id=new_domain['id'],
                                         role_id=role['id'])

    # Check there are 4 role assignments for that role
    role_assignments = self.assignment_api.list_role_assignments(
        role_id=role['id'])
    self.assertThat(role_assignments, matchers.HasLength(4))

    # Delete first new domain and check only 2 assignments were left
    # (a domain must be disabled before it can be deleted).
    self.resource_api.update_domain(new_domains[0]['id'],
                                    {'enabled': False})
    self.resource_api.delete_domain(new_domains[0]['id'])

    role_assignments = self.assignment_api.list_role_assignments(
        role_id=role['id'])
    self.assertThat(role_assignments, matchers.HasLength(2))

    # Delete second new domain and check no assignments were left
    self.resource_api.update_domain(new_domains[1]['id'],
                                    {'enabled': False})
    self.resource_api.delete_domain(new_domains[1]['id'])

    role_assignments = self.assignment_api.list_role_assignments(
        role_id=role['id'])
    self.assertEqual([], role_assignments)
+
+
+class InheritanceTests(AssignmentTestHelperMixin):
+
+ def test_role_assignments_user_domain_to_project_inheritance(self):
+ test_plan = {
+ 'entities': {'domains': {'users': 2, 'projects': 1},
+ 'roles': 3},
+ 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'user': 0, 'role': 2, 'domain': 0,
+ 'inherited_to_projects': True},
+ {'user': 1, 'role': 1, 'project': 0}],
+ 'tests': [
+ # List all direct assignments for user[0]
+ {'params': {'user': 0},
+ 'results': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'user': 0, 'role': 2, 'domain': 0,
+ 'inherited_to_projects': 'projects'}]},
+ # Now the effective ones - so the domain role should turn into
+ # a project role
+ {'params': {'user': 0, 'effective': True},
+ 'results': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'user': 0, 'role': 2, 'project': 0,
+ 'indirect': {'domain': 0}}]},
+ # Narrow down to effective roles for user[0] and project[0]
+ {'params': {'user': 0, 'project': 0, 'effective': True},
+ 'results': [{'user': 0, 'role': 1, 'project': 0},
+ {'user': 0, 'role': 2, 'project': 0,
+ 'indirect': {'domain': 0}}]}
+ ]
+ }
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ self.execute_assignment_plan(test_plan)
+
+ def test_inherited_role_assignments_excluded_if_os_inherit_false(self):
+ test_plan = {
+ 'entities': {'domains': {'users': 2, 'groups': 1, 'projects': 1},
+ 'roles': 4},
+ 'group_memberships': [{'group': 0, 'users': [0]}],
+ 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'user': 0, 'role': 2, 'domain': 0,
+ 'inherited_to_projects': True},
+ {'user': 1, 'role': 1, 'project': 0},
+ {'group': 0, 'role': 3, 'project': 0}],
+ 'tests': [
+ # List all direct assignments for user[0], since os-inherit is
+ # disabled, we should not see the inherited role
+ {'params': {'user': 0},
+ 'results': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0}]},
+ # Same in effective mode - inherited roles should not be
+ # included or expanded...but the group role should now
+ # turn up as a user role, since group expansion is not
+ # part of os-inherit.
+ {'params': {'user': 0, 'effective': True},
+ 'results': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'project': 0},
+ {'user': 0, 'role': 3, 'project': 0,
+ 'indirect': {'group': 0}}]},
+ ]
+ }
+ self.config_fixture.config(group='os_inherit', enabled=False)
+ self.execute_assignment_plan(test_plan)
+
+ def _test_crud_inherited_and_direct_assignment(self, **kwargs):
+ """Tests inherited and direct assignments for the actor and target
+
+ Ensure it is possible to create both inherited and direct role
+ assignments for the same actor on the same target. The actor and the
+ target are specified in the kwargs as ('user_id' or 'group_id') and
+ ('project_id' or 'domain_id'), respectively.
+
+ """
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ # Create a new role to avoid assignments loaded from default fixtures
+ role = unit.new_role_ref()
+ role = self.role_api.create_role(role['id'], role)
+
+ # Define the common assignment entity
+ assignment_entity = {'role_id': role['id']}
+ assignment_entity.update(kwargs)
+
+ # Define assignments under test
+ direct_assignment_entity = assignment_entity.copy()
+ inherited_assignment_entity = assignment_entity.copy()
+ inherited_assignment_entity['inherited_to_projects'] = 'projects'
+
+ # Create direct assignment and check grants
+ self.assignment_api.create_grant(inherited_to_projects=False,
+ **assignment_entity)
+
+ grants = self.assignment_api.list_role_assignments(role_id=role['id'])
+ self.assertThat(grants, matchers.HasLength(1))
+ self.assertIn(direct_assignment_entity, grants)
+
+ # Now add inherited assignment and check grants
+ self.assignment_api.create_grant(inherited_to_projects=True,
+ **assignment_entity)
+
+ grants = self.assignment_api.list_role_assignments(role_id=role['id'])
+ self.assertThat(grants, matchers.HasLength(2))
+ self.assertIn(direct_assignment_entity, grants)
+ self.assertIn(inherited_assignment_entity, grants)
+
+ # Delete both and check grants
+ self.assignment_api.delete_grant(inherited_to_projects=False,
+ **assignment_entity)
+ self.assignment_api.delete_grant(inherited_to_projects=True,
+ **assignment_entity)
+
+ grants = self.assignment_api.list_role_assignments(role_id=role['id'])
+ self.assertEqual([], grants)
+
+ def test_crud_inherited_and_direct_assignment_for_user_on_domain(self):
+ self._test_crud_inherited_and_direct_assignment(
+ user_id=self.user_foo['id'],
+ domain_id=CONF.identity.default_domain_id)
+
+ def test_crud_inherited_and_direct_assignment_for_group_on_domain(self):
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group = self.identity_api.create_group(group)
+
+ self._test_crud_inherited_and_direct_assignment(
+ group_id=group['id'], domain_id=CONF.identity.default_domain_id)
+
+ def test_crud_inherited_and_direct_assignment_for_user_on_project(self):
+ self._test_crud_inherited_and_direct_assignment(
+ user_id=self.user_foo['id'], project_id=self.tenant_baz['id'])
+
+ def test_crud_inherited_and_direct_assignment_for_group_on_project(self):
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group = self.identity_api.create_group(group)
+
+ self._test_crud_inherited_and_direct_assignment(
+ group_id=group['id'], project_id=self.tenant_baz['id'])
+
    def test_inherited_role_grants_for_user(self):
        """Test inherited user roles.

        Test Plan:

        - Enable OS-INHERIT extension
        - Create 3 roles
        - Create a domain, with a project and a user
        - Check no roles yet exist
        - Assign a direct user role to the project and a (non-inherited)
          user role to the domain
        - Get a list of effective roles - should only get the one direct role
        - Now add an inherited user role to the domain
        - Get a list of effective roles - should have two roles, one
          direct and one by virtue of the inherited user role
        - Also get effective roles for the domain - the role marked as
          inherited should not show up

        """
        self.config_fixture.config(group='os_inherit', enabled=True)
        role_list = []
        for _ in range(3):
            role = unit.new_role_ref()
            self.role_api.create_role(role['id'], role)
            role_list.append(role)
        domain1 = unit.new_domain_ref()
        self.resource_api.create_domain(domain1['id'], domain1)
        user1 = unit.new_user_ref(domain_id=domain1['id'])
        user1 = self.identity_api.create_user(user1)
        project1 = unit.new_project_ref(domain_id=domain1['id'])
        self.resource_api.create_project(project1['id'], project1)

        # The user starts out with no grants on the project.
        roles_ref = self.assignment_api.list_grants(
            user_id=user1['id'],
            project_id=project1['id'])
        self.assertEqual(0, len(roles_ref))

        # Create the first two roles - the domain one is not inherited
        self.assignment_api.create_grant(user_id=user1['id'],
                                         project_id=project1['id'],
                                         role_id=role_list[0]['id'])
        self.assignment_api.create_grant(user_id=user1['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[1]['id'])

        # Now get the effective roles for the user and project, this
        # should only include the direct role assignment on the project
        combined_list = self.assignment_api.get_roles_for_user_and_project(
            user1['id'], project1['id'])
        self.assertEqual(1, len(combined_list))
        self.assertIn(role_list[0]['id'], combined_list)

        # Now add an inherited role on the domain
        self.assignment_api.create_grant(user_id=user1['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[2]['id'],
                                         inherited_to_projects=True)

        # Now get the effective roles for the user and project again, this
        # should now include the inherited role on the domain
        combined_list = self.assignment_api.get_roles_for_user_and_project(
            user1['id'], project1['id'])
        self.assertEqual(2, len(combined_list))
        self.assertIn(role_list[0]['id'], combined_list)
        self.assertIn(role_list[2]['id'], combined_list)

        # Finally, check that the inherited role does not appear as a valid
        # directly assigned role on the domain itself
        combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
            user1['id'], domain1['id'])
        self.assertEqual(1, len(combined_role_list))
        self.assertIn(role_list[1]['id'], combined_role_list)

        # TODO(henry-nash): The test above uses get_roles_for_user_and_project
        # and get_roles_for_user_and_domain, which will, in a subsequent patch,
        # be re-implemented to simply call list_role_assignments (see blueprint
        # remove-role-metadata).
        #
        # The test plan below therefore mirrors this test, to ensure that
        # list_role_assignments works the same. Once get_roles_for_user_and
        # project/domain have been re-implemented then the manual tests above
        # can be refactored to simply ensure it gives the same answers.
        test_plan = {
            # A domain with a user & project, plus 3 roles.
            'entities': {'domains': {'users': 1, 'projects': 1},
                         'roles': 3},
            'assignments': [{'user': 0, 'role': 0, 'project': 0},
                            {'user': 0, 'role': 1, 'domain': 0},
                            {'user': 0, 'role': 2, 'domain': 0,
                             'inherited_to_projects': True}],
            'tests': [
                # List all effective assignments for user[0] on project[0].
                # Should get one direct role and one inherited role.
                {'params': {'user': 0, 'project': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0},
                             {'user': 0, 'role': 2, 'project': 0,
                              'indirect': {'domain': 0}}]},
                # Ensure effective mode on the domain does not list the
                # inherited role on that domain
                {'params': {'user': 0, 'domain': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 1, 'domain': 0}]},
                # Ensure non-inherited mode also only returns the non-inherited
                # role on the domain
                {'params': {'user': 0, 'domain': 0, 'inherited': False},
                 'results': [{'user': 0, 'role': 1, 'domain': 0}]},
            ]
        }
        self.execute_assignment_plan(test_plan)
+
    def test_inherited_role_grants_for_group(self):
        """Test inherited group roles.

        Test Plan:

        - Enable OS-INHERIT extension
        - Create 4 roles
        - Create a domain, with a project, user and two groups
        - Make the user a member of both groups
        - Check no roles yet exist
        - Assign a direct user role to the project and a (non-inherited)
          group role on the domain
        - Get a list of effective roles - should only get the one direct role
        - Now add two inherited group roles to the domain
        - Get a list of effective roles - should have three roles, one
          direct and two by virtue of inherited group roles

        """
        self.config_fixture.config(group='os_inherit', enabled=True)
        role_list = []
        for _ in range(4):
            role = unit.new_role_ref()
            self.role_api.create_role(role['id'], role)
            role_list.append(role)
        domain1 = unit.new_domain_ref()
        self.resource_api.create_domain(domain1['id'], domain1)
        user1 = unit.new_user_ref(domain_id=domain1['id'])
        user1 = self.identity_api.create_user(user1)
        group1 = unit.new_group_ref(domain_id=domain1['id'])
        group1 = self.identity_api.create_group(group1)
        group2 = unit.new_group_ref(domain_id=domain1['id'])
        group2 = self.identity_api.create_group(group2)
        project1 = unit.new_project_ref(domain_id=domain1['id'])
        self.resource_api.create_project(project1['id'], project1)

        # The user belongs to both groups, so grants on either group apply.
        self.identity_api.add_user_to_group(user1['id'],
                                            group1['id'])
        self.identity_api.add_user_to_group(user1['id'],
                                            group2['id'])

        # The user starts out with no grants on the project.
        roles_ref = self.assignment_api.list_grants(
            user_id=user1['id'],
            project_id=project1['id'])
        self.assertEqual(0, len(roles_ref))

        # Create two roles - the domain one is not inherited
        self.assignment_api.create_grant(user_id=user1['id'],
                                         project_id=project1['id'],
                                         role_id=role_list[0]['id'])
        self.assignment_api.create_grant(group_id=group1['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[1]['id'])

        # Now get the effective roles for the user and project, this
        # should only include the direct role assignment on the project
        combined_list = self.assignment_api.get_roles_for_user_and_project(
            user1['id'], project1['id'])
        self.assertEqual(1, len(combined_list))
        self.assertIn(role_list[0]['id'], combined_list)

        # Now add two more group roles, both inherited, to the domain
        self.assignment_api.create_grant(group_id=group2['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[2]['id'],
                                         inherited_to_projects=True)
        self.assignment_api.create_grant(group_id=group2['id'],
                                         domain_id=domain1['id'],
                                         role_id=role_list[3]['id'],
                                         inherited_to_projects=True)

        # Now get the effective roles for the user and project again, this
        # should now include the inherited roles on the domain
        combined_list = self.assignment_api.get_roles_for_user_and_project(
            user1['id'], project1['id'])
        self.assertEqual(3, len(combined_list))
        self.assertIn(role_list[0]['id'], combined_list)
        self.assertIn(role_list[2]['id'], combined_list)
        self.assertIn(role_list[3]['id'], combined_list)

        # TODO(henry-nash): The test above uses get_roles_for_user_and_project
        # which will, in a subsequent patch, be re-implemented to simply call
        # list_role_assignments (see blueprint remove-role-metadata).
        #
        # The test plan below therefore mirrors this test, to ensure that
        # list_role_assignments works the same. Once
        # get_roles_for_user_and_project has been re-implemented then the
        # manual tests above can be refactored to simply ensure it gives
        # the same answers.
        test_plan = {
            # A domain with a user and project, 2 groups, plus 4 roles.
            'entities': {'domains': {'users': 1, 'projects': 1, 'groups': 2},
                         'roles': 4},
            'group_memberships': [{'group': 0, 'users': [0]},
                                  {'group': 1, 'users': [0]}],
            'assignments': [{'user': 0, 'role': 0, 'project': 0},
                            {'group': 0, 'role': 1, 'domain': 0},
                            {'group': 1, 'role': 2, 'domain': 0,
                             'inherited_to_projects': True},
                            {'group': 1, 'role': 3, 'domain': 0,
                             'inherited_to_projects': True}],
            'tests': [
                # List all effective assignments for user[0] on project[0].
                # Should get one direct role and both inherited roles, but
                # not the direct one on domain[0], even though user[0] is
                # in group[0].
                {'params': {'user': 0, 'project': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0},
                             {'user': 0, 'role': 2, 'project': 0,
                              'indirect': {'domain': 0, 'group': 1}},
                             {'user': 0, 'role': 3, 'project': 0,
                              'indirect': {'domain': 0, 'group': 1}}]}
            ]
        }
        self.execute_assignment_plan(test_plan)
+
    def test_list_projects_for_user_with_inherited_grants(self):
        """Test inherited user roles.

        Test Plan:

        - Enable OS-INHERIT extension
        - Create a domain, with two projects and a user
        - Assign an inherited user role on the domain, as well as a direct
          user role to a separate project in a different domain
        - Get a list of projects for user, should return all three projects

        """
        self.config_fixture.config(group='os_inherit', enabled=True)
        domain = unit.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        user1 = unit.new_user_ref(domain_id=domain['id'])
        user1 = self.identity_api.create_user(user1)
        project1 = unit.new_project_ref(domain_id=domain['id'])
        self.resource_api.create_project(project1['id'], project1)
        project2 = unit.new_project_ref(domain_id=domain['id'])
        self.resource_api.create_project(project2['id'], project2)

        # Create 2 grants, one on a project and one inherited grant
        # on the domain.  tenant_bar lives in a different (default) domain,
        # so it is not covered by the inherited grant.
        self.assignment_api.create_grant(user_id=user1['id'],
                                         project_id=self.tenant_bar['id'],
                                         role_id=self.role_member['id'])
        self.assignment_api.create_grant(user_id=user1['id'],
                                         domain_id=domain['id'],
                                         role_id=self.role_admin['id'],
                                         inherited_to_projects=True)
        # Should get back all three projects, one by virtue of the direct
        # grant, plus both projects in the domain
        user_projects = self.assignment_api.list_projects_for_user(user1['id'])
        self.assertEqual(3, len(user_projects))

        # TODO(henry-nash): The test above uses list_projects_for_user
        # which may, in a subsequent patch, be re-implemented to call
        # list_role_assignments and then report only the distinct projects.
        #
        # The test plan below therefore mirrors this test, to ensure that
        # list_role_assignments works the same. Once list_projects_for_user
        # has been re-implemented then the manual tests above can be
        # refactored.
        test_plan = {
            # A domain with 1 project, plus a second domain with 2 projects,
            # as well as a user. Also, create 2 roles.
            'entities': {'domains': [{'projects': 1},
                                     {'users': 1, 'projects': 2}],
                         'roles': 2},
            'assignments': [{'user': 0, 'role': 0, 'project': 0},
                            {'user': 0, 'role': 1, 'domain': 1,
                             'inherited_to_projects': True}],
            'tests': [
                # List all effective assignments for user[0]
                # Should get one direct role plus one inherited role for each
                # project in domain
                {'params': {'user': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0},
                             {'user': 0, 'role': 1, 'project': 1,
                              'indirect': {'domain': 1}},
                             {'user': 0, 'role': 1, 'project': 2,
                              'indirect': {'domain': 1}}]}
            ]
        }
        self.execute_assignment_plan(test_plan)
+
    def test_list_projects_for_user_with_inherited_user_project_grants(self):
        """Test inherited role assignments for users on nested projects.

        Test Plan:

        - Enable OS-INHERIT extension
        - Create a hierarchy of projects with one root and one leaf project
        - Assign an inherited user role on root project
        - Assign a non-inherited user role on root project
        - Get a list of projects for user, should return both projects
        - Disable OS-INHERIT extension
        - Get a list of projects for user, should return only root project

        """
        # Enable OS-INHERIT extension
        self.config_fixture.config(group='os_inherit', enabled=True)
        root_project = unit.new_project_ref(
            domain_id=CONF.identity.default_domain_id)
        root_project = self.resource_api.create_project(root_project['id'],
                                                        root_project)
        # The leaf is a child of the root, so inherited grants on the root
        # apply to it.
        leaf_project = unit.new_project_ref(
            domain_id=CONF.identity.default_domain_id,
            parent_id=root_project['id'])
        leaf_project = self.resource_api.create_project(leaf_project['id'],
                                                        leaf_project)

        user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
        user = self.identity_api.create_user(user)

        # Grant inherited user role
        self.assignment_api.create_grant(user_id=user['id'],
                                         project_id=root_project['id'],
                                         role_id=self.role_admin['id'],
                                         inherited_to_projects=True)
        # Grant non-inherited user role
        self.assignment_api.create_grant(user_id=user['id'],
                                         project_id=root_project['id'],
                                         role_id=self.role_member['id'])
        # Should get back both projects: because the direct role assignment for
        # the root project and inherited role assignment for leaf project
        user_projects = self.assignment_api.list_projects_for_user(user['id'])
        self.assertEqual(2, len(user_projects))
        self.assertIn(root_project, user_projects)
        self.assertIn(leaf_project, user_projects)

        # Disable OS-INHERIT extension
        self.config_fixture.config(group='os_inherit', enabled=False)
        # Should get back just root project - due the direct role assignment
        user_projects = self.assignment_api.list_projects_for_user(user['id'])
        self.assertEqual(1, len(user_projects))
        self.assertIn(root_project, user_projects)

        # TODO(henry-nash): The test above uses list_projects_for_user
        # which may, in a subsequent patch, be re-implemented to call
        # list_role_assignments and then report only the distinct projects.
        #
        # The test plan below therefore mirrors this test, to ensure that
        # list_role_assignments works the same. Once list_projects_for_user
        # has been re-implemented then the manual tests above can be
        # refactored.
        test_plan = {
            # A domain with a project and sub-project, plus a user.
            # Also, create 2 roles.
            'entities': {
                'domains': {'id': CONF.identity.default_domain_id, 'users': 1,
                            'projects': {'project': 1}},
                'roles': 2},
            # A direct role and an inherited role on the parent
            'assignments': [{'user': 0, 'role': 0, 'project': 0},
                            {'user': 0, 'role': 1, 'project': 0,
                             'inherited_to_projects': True}],
            'tests': [
                # List all effective assignments for user[0] - should get back
                # one direct role plus one inherited role.
                {'params': {'user': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0},
                             {'user': 0, 'role': 1, 'project': 1,
                              'indirect': {'project': 0}}]}
            ]
        }

        test_plan_with_os_inherit_disabled = {
            'tests': [
                # List all effective assignments for user[0] - should only get
                # back the one direct role.
                {'params': {'user': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0}]}
            ]
        }
        self.config_fixture.config(group='os_inherit', enabled=True)
        test_data = self.execute_assignment_plan(test_plan)
        self.config_fixture.config(group='os_inherit', enabled=False)
        # Pass the existing test data in to allow execution of 2nd test plan
        self.execute_assignment_cases(
            test_plan_with_os_inherit_disabled, test_data)
+
    def test_list_projects_for_user_with_inherited_group_grants(self):
        """Test inherited group roles.

        Test Plan:

        - Enable OS-INHERIT extension
        - Create two domains, each with two projects
        - Create a user and group
        - Make the user a member of the group
        - Assign a user role two projects, an inherited
          group role to one domain and an inherited regular role on
          the other domain
        - Get a list of projects for user, should return both pairs of projects
          from the domain, plus the one separate project

        """
        self.config_fixture.config(group='os_inherit', enabled=True)
        domain = unit.new_domain_ref()
        self.resource_api.create_domain(domain['id'], domain)
        domain2 = unit.new_domain_ref()
        self.resource_api.create_domain(domain2['id'], domain2)
        project1 = unit.new_project_ref(domain_id=domain['id'])
        self.resource_api.create_project(project1['id'], project1)
        project2 = unit.new_project_ref(domain_id=domain['id'])
        self.resource_api.create_project(project2['id'], project2)
        project3 = unit.new_project_ref(domain_id=domain2['id'])
        self.resource_api.create_project(project3['id'], project3)
        project4 = unit.new_project_ref(domain_id=domain2['id'])
        self.resource_api.create_project(project4['id'], project4)
        user1 = unit.new_user_ref(domain_id=domain['id'])
        user1 = self.identity_api.create_user(user1)
        group1 = unit.new_group_ref(domain_id=domain['id'])
        group1 = self.identity_api.create_group(group1)
        self.identity_api.add_user_to_group(user1['id'], group1['id'])

        # Create 4 grants:
        # - one user grant on a project in domain2
        # - one user grant on a project in the default domain
        # - one inherited user grant on domain
        # - one inherited group grant on domain2
        self.assignment_api.create_grant(user_id=user1['id'],
                                         project_id=project3['id'],
                                         role_id=self.role_member['id'])
        self.assignment_api.create_grant(user_id=user1['id'],
                                         project_id=self.tenant_bar['id'],
                                         role_id=self.role_member['id'])
        self.assignment_api.create_grant(user_id=user1['id'],
                                         domain_id=domain['id'],
                                         role_id=self.role_admin['id'],
                                         inherited_to_projects=True)
        self.assignment_api.create_grant(group_id=group1['id'],
                                         domain_id=domain2['id'],
                                         role_id=self.role_admin['id'],
                                         inherited_to_projects=True)
        # Should get back all five projects (tenant_bar plus the four
        # created above), but without a duplicate for project3 (which has
        # both a direct user role and an inherited group role).
        user_projects = self.assignment_api.list_projects_for_user(user1['id'])
        self.assertEqual(5, len(user_projects))

        # TODO(henry-nash): The test above uses list_projects_for_user
        # which may, in a subsequent patch, be re-implemented to call
        # list_role_assignments and then report only the distinct projects.
        #
        # The test plan below therefore mirrors this test, to ensure that
        # list_role_assignments works the same. Once list_projects_for_user
        # has been re-implemented then the manual tests above can be
        # refactored.
        test_plan = {
            # A domain with a 1 project, plus a second domain with 2 projects,
            # as well as a user & group and a 3rd domain with 2 projects.
            # Also, created 2 roles.
            'entities': {'domains': [{'projects': 1},
                                     {'users': 1, 'groups': 1, 'projects': 2},
                                     {'projects': 2}],
                         'roles': 2},
            'group_memberships': [{'group': 0, 'users': [0]}],
            'assignments': [{'user': 0, 'role': 0, 'project': 0},
                            {'user': 0, 'role': 0, 'project': 3},
                            {'user': 0, 'role': 1, 'domain': 1,
                             'inherited_to_projects': True},
                            {'user': 0, 'role': 1, 'domain': 2,
                             'inherited_to_projects': True}],
            'tests': [
                # List all effective assignments for user[0]
                # Should get back both direct roles plus roles on both projects
                # from each domain. Duplicates should not be filtered out.
                {'params': {'user': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 3},
                             {'user': 0, 'role': 0, 'project': 0},
                             {'user': 0, 'role': 1, 'project': 1,
                              'indirect': {'domain': 1}},
                             {'user': 0, 'role': 1, 'project': 2,
                              'indirect': {'domain': 1}},
                             {'user': 0, 'role': 1, 'project': 3,
                              'indirect': {'domain': 2}},
                             {'user': 0, 'role': 1, 'project': 4,
                              'indirect': {'domain': 2}}]}
            ]
        }
        self.execute_assignment_plan(test_plan)
+
    def test_list_projects_for_user_with_inherited_group_project_grants(self):
        """Test inherited role assignments for groups on nested projects.

        Test Plan:

        - Enable OS-INHERIT extension
        - Create a hierarchy of projects with one root and one leaf project
        - Assign an inherited group role on root project
        - Assign a non-inherited group role on root project
        - Get a list of projects for user, should return both projects
        - Disable OS-INHERIT extension
        - Get a list of projects for user, should return only root project

        """
        self.config_fixture.config(group='os_inherit', enabled=True)
        root_project = unit.new_project_ref(
            domain_id=CONF.identity.default_domain_id)
        root_project = self.resource_api.create_project(root_project['id'],
                                                        root_project)
        # The leaf is a child of the root, so inherited grants on the root
        # apply to it.
        leaf_project = unit.new_project_ref(
            domain_id=CONF.identity.default_domain_id,
            parent_id=root_project['id'])
        leaf_project = self.resource_api.create_project(leaf_project['id'],
                                                        leaf_project)

        user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
        user = self.identity_api.create_user(user)

        # Group grants reach the user through membership.
        group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
        group = self.identity_api.create_group(group)
        self.identity_api.add_user_to_group(user['id'], group['id'])

        # Grant inherited group role
        self.assignment_api.create_grant(group_id=group['id'],
                                         project_id=root_project['id'],
                                         role_id=self.role_admin['id'],
                                         inherited_to_projects=True)
        # Grant non-inherited group role
        self.assignment_api.create_grant(group_id=group['id'],
                                         project_id=root_project['id'],
                                         role_id=self.role_member['id'])
        # Should get back both projects: because the direct role assignment for
        # the root project and inherited role assignment for leaf project
        user_projects = self.assignment_api.list_projects_for_user(user['id'])
        self.assertEqual(2, len(user_projects))
        self.assertIn(root_project, user_projects)
        self.assertIn(leaf_project, user_projects)

        # Disable OS-INHERIT extension
        self.config_fixture.config(group='os_inherit', enabled=False)
        # Should get back just root project - due the direct role assignment
        user_projects = self.assignment_api.list_projects_for_user(user['id'])
        self.assertEqual(1, len(user_projects))
        self.assertIn(root_project, user_projects)

        # TODO(henry-nash): The test above uses list_projects_for_user
        # which may, in a subsequent patch, be re-implemented to call
        # list_role_assignments and then report only the distinct projects.
        #
        # The test plan below therefore mirrors this test, to ensure that
        # list_role_assignments works the same. Once list_projects_for_user
        # has been re-implemented then the manual tests above can be
        # refactored.
        test_plan = {
            # A domain with a project and sub-project, plus a user and a
            # group. Also, create 2 roles.
            'entities': {
                'domains': {'id': CONF.identity.default_domain_id, 'users': 1,
                            'groups': 1,
                            'projects': {'project': 1}},
                'roles': 2},
            'group_memberships': [{'group': 0, 'users': [0]}],
            # A direct role and an inherited role on the parent
            'assignments': [{'group': 0, 'role': 0, 'project': 0},
                            {'group': 0, 'role': 1, 'project': 0,
                             'inherited_to_projects': True}],
            'tests': [
                # List all effective assignments for user[0] - should get back
                # one direct role plus one inherited role.
                {'params': {'user': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0,
                              'indirect': {'group': 0}},
                             {'user': 0, 'role': 1, 'project': 1,
                              'indirect': {'group': 0, 'project': 0}}]}
            ]
        }

        test_plan_with_os_inherit_disabled = {
            'tests': [
                # List all effective assignments for user[0] - should only get
                # back the one direct role.
                {'params': {'user': 0, 'effective': True},
                 'results': [{'user': 0, 'role': 0, 'project': 0,
                              'indirect': {'group': 0}}]}
            ]
        }
        self.config_fixture.config(group='os_inherit', enabled=True)
        test_data = self.execute_assignment_plan(test_plan)
        self.config_fixture.config(group='os_inherit', enabled=False)
        # Pass the existing test data in to allow execution of 2nd test plan
        self.execute_assignment_cases(
            test_plan_with_os_inherit_disabled, test_data)
+
+ def test_list_assignments_for_tree(self):
+ """Test we correctly list direct assignments for a tree"""
+ # Enable OS-INHERIT extension
+ self.config_fixture.config(group='os_inherit', enabled=True)
+
+ test_plan = {
+ # Create a domain with a project hierarchy 3 levels deep:
+ #
+ # project 0
+ # ____________|____________
+ # | |
+ # project 1 project 4
+ # ______|_____ ______|_____
+ # | | | |
+ # project 2 project 3 project 5 project 6
+ #
+ # Also, create 1 user and 4 roles.
+ 'entities': {
+ 'domains': {
+ 'projects': {'project': [{'project': 2},
+ {'project': 2}]},
+ 'users': 1},
+ 'roles': 4},
+ 'assignments': [
+ # Direct assignment to projects 1 and 2
+ {'user': 0, 'role': 0, 'project': 1},
+ {'user': 0, 'role': 1, 'project': 2},
+ # Also an inherited assignment on project 1
+ {'user': 0, 'role': 2, 'project': 1,
+ 'inherited_to_projects': True},
+ # ...and two spoiler assignments, one to the root and one
+ # to project 4
+ {'user': 0, 'role': 0, 'project': 0},
+ {'user': 0, 'role': 3, 'project': 4}],
+ 'tests': [
+ # List all assignments for project 1 and its subtree.
+ {'params': {'project': 1, 'include_subtree': True},
+ 'results': [
+ # Only the actual assignments should be returned, no
+ # expansion of inherited assignments
+ {'user': 0, 'role': 0, 'project': 1},
+ {'user': 0, 'role': 1, 'project': 2},
+ {'user': 0, 'role': 2, 'project': 1,
+ 'inherited_to_projects': 'projects'}]}
+ ]
+ }
+
+ self.execute_assignment_plan(test_plan)
+
+ def test_list_effective_assignments_for_tree(self):
+ """Test we correctly list effective assignments for a tree"""
+ # Enable OS-INHERIT extension
+ self.config_fixture.config(group='os_inherit', enabled=True)
+
+ test_plan = {
+ # Create a domain with a project hierarchy 3 levels deep:
+ #
+ # project 0
+ # ____________|____________
+ # | |
+ # project 1 project 4
+ # ______|_____ ______|_____
+ # | | | |
+ # project 2 project 3 project 5 project 6
+ #
+ # Also, create 1 user and 4 roles.
+ 'entities': {
+ 'domains': {
+ 'projects': {'project': [{'project': 2},
+ {'project': 2}]},
+ 'users': 1},
+ 'roles': 4},
+ 'assignments': [
+ # An inherited assignment on project 1
+ {'user': 0, 'role': 1, 'project': 1,
+ 'inherited_to_projects': True},
+ # A direct assignment to project 2
+ {'user': 0, 'role': 2, 'project': 2},
+ # ...and two spoiler assignments, one to the root and one
+ # to project 4
+ {'user': 0, 'role': 0, 'project': 0},
+ {'user': 0, 'role': 3, 'project': 4}],
+ 'tests': [
+ # List all effective assignments for project 1 and its subtree.
+ {'params': {'project': 1, 'effective': True,
+ 'include_subtree': True},
+ 'results': [
+ # The inherited assignment on project 1 should appear only
+ # on its children
+ {'user': 0, 'role': 1, 'project': 2,
+ 'indirect': {'project': 1}},
+ {'user': 0, 'role': 1, 'project': 3,
+ 'indirect': {'project': 1}},
+ # And finally the direct assignment on project 2
+ {'user': 0, 'role': 2, 'project': 2}]}
+ ]
+ }
+
+ self.execute_assignment_plan(test_plan)
+
+ def test_list_effective_assignments_for_tree_with_mixed_assignments(self):
+ """Test that we correctly combine assignments for a tree.
+
+ In this test we want to ensure that when asking for a list of
+ assignments in a subtree, any assignments inherited from above the
+ subtree are correctly combined with any assignments within the subtree
+ itself.
+
+ """
+ # Enable OS-INHERIT extension
+ self.config_fixture.config(group='os_inherit', enabled=True)
+
+ test_plan = {
+ # Create a domain with a project hierarchy 3 levels deep:
+ #
+ # project 0
+ # ____________|____________
+ # | |
+ # project 1 project 4
+ # ______|_____ ______|_____
+ # | | | |
+ # project 2 project 3 project 5 project 6
+ #
+ # Also, create 2 users, 1 group and 4 roles.
+ 'entities': {
+ 'domains': {
+ 'projects': {'project': [{'project': 2},
+ {'project': 2}]},
+ 'users': 2, 'groups': 1},
+ 'roles': 4},
+ # Both users are part of the same group
+ 'group_memberships': [{'group': 0, 'users': [0, 1]}],
+ # We are going to ask for listing of assignment on project 1 and
+ # its subtree. So first we'll add two inherited assignments above
+ # this (one user and one for a group that contains this user).
+ 'assignments': [{'user': 0, 'role': 0, 'project': 0,
+ 'inherited_to_projects': True},
+ {'group': 0, 'role': 1, 'project': 0,
+ 'inherited_to_projects': True},
+ # Now an inherited assignment on project 1 itself,
+ # which should ONLY show up on its children
+ {'user': 0, 'role': 2, 'project': 1,
+ 'inherited_to_projects': True},
+ # ...and a direct assignment on one of those
+ # children
+ {'user': 0, 'role': 3, 'project': 2},
+ # The rest are spoiler assignments
+ {'user': 0, 'role': 2, 'project': 5},
+ {'user': 0, 'role': 3, 'project': 4}],
+ 'tests': [
+ # List all effective assignments for project 1 and its subtree.
+ {'params': {'project': 1, 'user': 0, 'effective': True,
+ 'include_subtree': True},
+ 'results': [
+ # First, we should see the inherited user assignment from
+ # project 0 on all projects in the subtree
+ {'user': 0, 'role': 0, 'project': 1,
+ 'indirect': {'project': 0}},
+ {'user': 0, 'role': 0, 'project': 2,
+ 'indirect': {'project': 0}},
+ {'user': 0, 'role': 0, 'project': 3,
+ 'indirect': {'project': 0}},
+ # Also the inherited group assignment from project 0 on
+ # the subtree
+ {'user': 0, 'role': 1, 'project': 1,
+ 'indirect': {'project': 0, 'group': 0}},
+ {'user': 0, 'role': 1, 'project': 2,
+ 'indirect': {'project': 0, 'group': 0}},
+ {'user': 0, 'role': 1, 'project': 3,
+ 'indirect': {'project': 0, 'group': 0}},
+ # The inherited assignment on project 1 should appear only
+ # on its children
+ {'user': 0, 'role': 2, 'project': 2,
+ 'indirect': {'project': 1}},
+ {'user': 0, 'role': 2, 'project': 3,
+ 'indirect': {'project': 1}},
+ # And finally the direct assignment on project 2
+ {'user': 0, 'role': 3, 'project': 2}]}
+ ]
+ }
+
+ self.execute_assignment_plan(test_plan)
+
+ def test_list_effective_assignments_for_tree_with_domain_assignments(self):
+ """Test we correctly honor domain inherited assignments on the tree"""
+ # Enable OS-INHERIT extension
+ self.config_fixture.config(group='os_inherit', enabled=True)
+
+ test_plan = {
+ # Create a domain with a project hierarchy 3 levels deep:
+ #
+ # project 0
+ # ____________|____________
+ # | |
+ # project 1 project 4
+ # ______|_____ ______|_____
+ # | | | |
+ # project 2 project 3 project 5 project 6
+ #
+ # Also, create 1 user and 4 roles.
+ 'entities': {
+ 'domains': {
+ 'projects': {'project': [{'project': 2},
+ {'project': 2}]},
+ 'users': 1},
+ 'roles': 4},
+ 'assignments': [
+ # An inherited assignment on the domain (which should be
+ # applied to all the projects)
+ {'user': 0, 'role': 1, 'domain': 0,
+ 'inherited_to_projects': True},
+ # A direct assignment to project 2
+ {'user': 0, 'role': 2, 'project': 2},
+ # ...and two spoiler assignments, one to the root and one
+ # to project 4
+ {'user': 0, 'role': 0, 'project': 0},
+ {'user': 0, 'role': 3, 'project': 4}],
+ 'tests': [
+ # List all effective assignments for project 1 and its subtree.
+ {'params': {'project': 1, 'effective': True,
+ 'include_subtree': True},
+ 'results': [
+ # The inherited assignment from the domain should appear
+ # only on the part of the subtree we are interested in
+ {'user': 0, 'role': 1, 'project': 1,
+ 'indirect': {'domain': 0}},
+ {'user': 0, 'role': 1, 'project': 2,
+ 'indirect': {'domain': 0}},
+ {'user': 0, 'role': 1, 'project': 3,
+ 'indirect': {'domain': 0}},
+ # And finally the direct assignment on project 2
+ {'user': 0, 'role': 2, 'project': 2}]}
+ ]
+ }
+
+ self.execute_assignment_plan(test_plan)
+
+ def test_list_user_ids_for_project_with_inheritance(self):
+ test_plan = {
+ # A domain with a project and sub-project, plus four users,
+ # two groups, as well as 4 roles.
+ 'entities': {
+ 'domains': {'id': CONF.identity.default_domain_id, 'users': 4,
+ 'groups': 2,
+ 'projects': {'project': 1}},
+ 'roles': 4},
+ # Each group has a unique user member
+ 'group_memberships': [{'group': 0, 'users': [1]},
+ {'group': 1, 'users': [3]}],
+ # Set up assignments so that there should end up with four
+ # effective assignments on project 1 - one direct, one due to
+ # group membership and one user assignment inherited from the
+ # parent and one group assignment inherited from the parent.
+ 'assignments': [{'user': 0, 'role': 0, 'project': 1},
+ {'group': 0, 'role': 1, 'project': 1},
+ {'user': 2, 'role': 2, 'project': 0,
+ 'inherited_to_projects': True},
+ {'group': 1, 'role': 3, 'project': 0,
+ 'inherited_to_projects': True}],
+ }
+ # Use assignment plan helper to create all the entities and
+ # assignments - then we'll run our own tests using the data
+ test_data = self.execute_assignment_plan(test_plan)
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ user_ids = self.assignment_api.list_user_ids_for_project(
+ test_data['projects'][1]['id'])
+ self.assertThat(user_ids, matchers.HasLength(4))
+ for x in range(0, 4):
+ self.assertIn(test_data['users'][x]['id'], user_ids)
+
+ def test_list_role_assignment_using_inherited_sourced_groups(self):
+ """Test listing inherited assignments when restricted by groups."""
+ test_plan = {
+ # A domain with 3 users, 3 groups, 3 projects, a second domain,
+ # plus 3 roles.
+ 'entities': {'domains': [{'users': 3, 'groups': 3, 'projects': 3},
+ 1],
+ 'roles': 3},
+ # Users 0 & 1 are in the group 0, User 0 also in group 1
+ 'group_memberships': [{'group': 0, 'users': [0, 1]},
+ {'group': 1, 'users': [0]}],
+ # Spread the assignments around - we want to be able to show that
+ # if sourced by group, assignments from other sources are excluded
+ 'assignments': [{'user': 0, 'role': 0, 'domain': 0},
+ {'group': 0, 'role': 1, 'domain': 1},
+ {'group': 1, 'role': 2, 'domain': 0,
+ 'inherited_to_projects': True},
+ {'group': 1, 'role': 2, 'project': 1},
+ {'user': 2, 'role': 1, 'project': 1,
+ 'inherited_to_projects': True},
+ {'group': 2, 'role': 2, 'project': 2}
+ ],
+ 'tests': [
+ # List all effective assignments sourced from groups 0 and 1.
+ # We should see the inherited group assigned on the 3 projects
+ # from domain 0, as well as the direct assignments.
+ {'params': {'source_from_group_ids': [0, 1],
+ 'effective': True},
+ 'results': [{'group': 0, 'role': 1, 'domain': 1},
+ {'group': 1, 'role': 2, 'project': 0,
+ 'indirect': {'domain': 0}},
+ {'group': 1, 'role': 2, 'project': 1,
+ 'indirect': {'domain': 0}},
+ {'group': 1, 'role': 2, 'project': 2,
+ 'indirect': {'domain': 0}},
+ {'group': 1, 'role': 2, 'project': 1}
+ ]},
+ ]
+ }
+ self.execute_assignment_plan(test_plan)
+
+
+class ImpliedRoleTests(AssignmentTestHelperMixin):
+
+ def test_implied_role_crd(self):
+ prior_role_ref = unit.new_role_ref()
+ self.role_api.create_role(prior_role_ref['id'], prior_role_ref)
+ implied_role_ref = unit.new_role_ref()
+ self.role_api.create_role(implied_role_ref['id'], implied_role_ref)
+
+ self.role_api.create_implied_role(
+ prior_role_ref['id'],
+ implied_role_ref['id'])
+ implied_role = self.role_api.get_implied_role(
+ prior_role_ref['id'],
+ implied_role_ref['id'])
+ expected_implied_role_ref = {
+ 'prior_role_id': prior_role_ref['id'],
+ 'implied_role_id': implied_role_ref['id']}
+ self.assertDictContainsSubset(
+ expected_implied_role_ref,
+ implied_role)
+
+ self.role_api.delete_implied_role(
+ prior_role_ref['id'],
+ implied_role_ref['id'])
+ self.assertRaises(exception.ImpliedRoleNotFound,
+ self.role_api.get_implied_role,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex)
+
+ def test_delete_implied_role_returns_not_found(self):
+ self.assertRaises(exception.ImpliedRoleNotFound,
+ self.role_api.delete_implied_role,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex)
+
+ def test_role_assignments_simple_tree_of_implied_roles(self):
+ """Test that implied roles are expanded out."""
+ test_plan = {
+ 'entities': {'domains': {'users': 1, 'projects': 1},
+ 'roles': 4},
+ # Three level tree of implied roles
+ 'implied_roles': [{'role': 0, 'implied_roles': 1},
+ {'role': 1, 'implied_roles': [2, 3]}],
+ 'assignments': [{'user': 0, 'role': 0, 'project': 0}],
+ 'tests': [
+ # List all direct assignments for user[0], this should just
+ # show the one top level role assignment
+ {'params': {'user': 0},
+ 'results': [{'user': 0, 'role': 0, 'project': 0}]},
+ # Listing in effective mode should show the implied roles
+ # expanded out
+ {'params': {'user': 0, 'effective': True},
+ 'results': [{'user': 0, 'role': 0, 'project': 0},
+ {'user': 0, 'role': 1, 'project': 0,
+ 'indirect': {'role': 0}},
+ {'user': 0, 'role': 2, 'project': 0,
+ 'indirect': {'role': 1}},
+ {'user': 0, 'role': 3, 'project': 0,
+ 'indirect': {'role': 1}}]},
+ ]
+ }
+ self.execute_assignment_plan(test_plan)
+
+ def test_circular_inferences(self):
+ """Test that implied roles are expanded out."""
+ test_plan = {
+ 'entities': {'domains': {'users': 1, 'projects': 1},
+ 'roles': 4},
+ # Three level tree of implied roles
+ 'implied_roles': [{'role': 0, 'implied_roles': [1]},
+ {'role': 1, 'implied_roles': [2, 3]},
+ {'role': 3, 'implied_roles': [0]}],
+ 'assignments': [{'user': 0, 'role': 0, 'project': 0}],
+ 'tests': [
+ # List all direct assignments for user[0], this should just
+ # show the one top level role assignment
+ {'params': {'user': 0},
+ 'results': [{'user': 0, 'role': 0, 'project': 0}]},
+ # Listing in effective mode should show the implied roles
+ # expanded out
+ {'params': {'user': 0, 'effective': True},
+ 'results': [{'user': 0, 'role': 0, 'project': 0},
+ {'user': 0, 'role': 0, 'project': 0,
+ 'indirect': {'role': 3}},
+ {'user': 0, 'role': 1, 'project': 0,
+ 'indirect': {'role': 0}},
+ {'user': 0, 'role': 2, 'project': 0,
+ 'indirect': {'role': 1}},
+ {'user': 0, 'role': 3, 'project': 0,
+ 'indirect': {'role': 1}}]},
+ ]
+ }
+ self.execute_assignment_plan(test_plan)
+
+ def test_role_assignments_directed_graph_of_implied_roles(self):
+ """Test that a role can have multiple, different prior roles."""
+ test_plan = {
+ 'entities': {'domains': {'users': 1, 'projects': 1},
+ 'roles': 6},
+ # Three level tree of implied roles, where one of the roles at the
+ # bottom is implied by more than one top level role
+ 'implied_roles': [{'role': 0, 'implied_roles': [1, 2]},
+ {'role': 1, 'implied_roles': [3, 4]},
+ {'role': 5, 'implied_roles': 4}],
+ # The user gets both top level roles
+ 'assignments': [{'user': 0, 'role': 0, 'project': 0},
+ {'user': 0, 'role': 5, 'project': 0}],
+ 'tests': [
+ # The implied roles should be expanded out and there should be
+ # two entries for the role that had two different prior roles.
+ {'params': {'user': 0, 'effective': True},
+ 'results': [{'user': 0, 'role': 0, 'project': 0},
+ {'user': 0, 'role': 5, 'project': 0},
+ {'user': 0, 'role': 1, 'project': 0,
+ 'indirect': {'role': 0}},
+ {'user': 0, 'role': 2, 'project': 0,
+ 'indirect': {'role': 0}},
+ {'user': 0, 'role': 3, 'project': 0,
+ 'indirect': {'role': 1}},
+ {'user': 0, 'role': 4, 'project': 0,
+ 'indirect': {'role': 1}},
+ {'user': 0, 'role': 4, 'project': 0,
+ 'indirect': {'role': 5}}]},
+ ]
+ }
+ test_data = self.execute_assignment_plan(test_plan)
+
+ # We should also be able to get a similar (yet summarized) answer to
+ # the above by calling get_roles_for_user_and_project(), which should
+ # list the role_ids, yet remove any duplicates
+ role_ids = self.assignment_api.get_roles_for_user_and_project(
+ test_data['users'][0]['id'], test_data['projects'][0]['id'])
+ # We should see 6 entries, not 7, since role index 5 appeared twice in
+ # the answer from list_role_assignments
+ self.assertThat(role_ids, matchers.HasLength(6))
+ for x in range(0, 5):
+ self.assertIn(test_data['roles'][x]['id'], role_ids)
+
+ def test_role_assignments_implied_roles_filtered_by_role(self):
+ """Test that you can filter by role even if roles are implied."""
+ test_plan = {
+ 'entities': {'domains': {'users': 1, 'projects': 2},
+ 'roles': 4},
+ # Three level tree of implied roles
+ 'implied_roles': [{'role': 0, 'implied_roles': 1},
+ {'role': 1, 'implied_roles': [2, 3]}],
+ 'assignments': [{'user': 0, 'role': 0, 'project': 0},
+ {'user': 0, 'role': 3, 'project': 1}],
+ 'tests': [
+ # List effective roles filtering by one of the implied roles,
+ # showing that the filter was implied post expansion of
+ # implied roles (and that non implied roles are included in
+ # the filter).
+ {'params': {'role': 3, 'effective': True},
+ 'results': [{'user': 0, 'role': 3, 'project': 0,
+ 'indirect': {'role': 1}},
+ {'user': 0, 'role': 3, 'project': 1}]},
+ ]
+ }
+ self.execute_assignment_plan(test_plan)
+
+ def test_role_assignments_simple_tree_of_implied_roles_on_domain(self):
+ """Test that implied roles are expanded out when placed on a domain."""
+ test_plan = {
+ 'entities': {'domains': {'users': 1},
+ 'roles': 4},
+ # Three level tree of implied roles
+ 'implied_roles': [{'role': 0, 'implied_roles': 1},
+ {'role': 1, 'implied_roles': [2, 3]}],
+ 'assignments': [{'user': 0, 'role': 0, 'domain': 0}],
+ 'tests': [
+ # List all direct assignments for user[0], this should just
+ # show the one top level role assignment
+ {'params': {'user': 0},
+ 'results': [{'user': 0, 'role': 0, 'domain': 0}]},
+ # Listing in effective mode should show the implied roles
+ # expanded out
+ {'params': {'user': 0, 'effective': True},
+ 'results': [{'user': 0, 'role': 0, 'domain': 0},
+ {'user': 0, 'role': 1, 'domain': 0,
+ 'indirect': {'role': 0}},
+ {'user': 0, 'role': 2, 'domain': 0,
+ 'indirect': {'role': 1}},
+ {'user': 0, 'role': 3, 'domain': 0,
+ 'indirect': {'role': 1}}]},
+ ]
+ }
+ self.execute_assignment_plan(test_plan)
+
+ def test_role_assignments_inherited_implied_roles(self):
+ """Test that you can intermix inherited and implied roles."""
+ test_plan = {
+ 'entities': {'domains': {'users': 1, 'projects': 1},
+ 'roles': 4},
+ # Simply one level of implied roles
+ 'implied_roles': [{'role': 0, 'implied_roles': 1}],
+ # Assign to top level role as an inherited assignment to the
+ # domain
+ 'assignments': [{'user': 0, 'role': 0, 'domain': 0,
+ 'inherited_to_projects': True}],
+ 'tests': [
+ # List all direct assignments for user[0], this should just
+ # show the one top level role assignment
+ {'params': {'user': 0},
+ 'results': [{'user': 0, 'role': 0, 'domain': 0,
+ 'inherited_to_projects': 'projects'}]},
+ # List in effective mode - we should only see the initial and
+ # implied role on the project (since inherited roles are not
+ # active on their anchor point).
+ {'params': {'user': 0, 'effective': True},
+ 'results': [{'user': 0, 'role': 0, 'project': 0,
+ 'indirect': {'domain': 0}},
+ {'user': 0, 'role': 1, 'project': 0,
+ 'indirect': {'domain': 0, 'role': 0}}]},
+ ]
+ }
+ self.config_fixture.config(group='os_inherit', enabled=True)
+ self.execute_assignment_plan(test_plan)
+
+ def test_role_assignments_domain_specific_with_implied_roles(self):
+ test_plan = {
+ 'entities': {'domains': {'users': 1, 'projects': 1, 'roles': 2},
+ 'roles': 2},
+ # Two level tree of implied roles, with the top and 1st level being
+ # domain specific roles, and the bottom level being inferred global
+ # roles.
+ 'implied_roles': [{'role': 0, 'implied_roles': [1]},
+ {'role': 1, 'implied_roles': [2, 3]}],
+ 'assignments': [{'user': 0, 'role': 0, 'project': 0}],
+ 'tests': [
+ # List all direct assignments for user[0], this should just
+ # show the one top level role assignment, even though this is a
+ # domain specific role (since we are in non-effective mode and
+ # we show any direct role assignment in that mode).
+ {'params': {'user': 0},
+ 'results': [{'user': 0, 'role': 0, 'project': 0}]},
+ # Now the effective ones - so the implied roles should be
+ # expanded out, as well as any domain specific roles should be
+ # removed.
+ {'params': {'user': 0, 'effective': True},
+ 'results': [{'user': 0, 'role': 2, 'project': 0,
+ 'indirect': {'role': 1}},
+ {'user': 0, 'role': 3, 'project': 0,
+ 'indirect': {'role': 1}}]},
+ ]
+ }
+ self.execute_assignment_plan(test_plan)
diff --git a/keystone-moon/keystone/tests/unit/assignment/test_core.py b/keystone-moon/keystone/tests/unit/assignment/test_core.py
new file mode 100644
index 00000000..494e19c3
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/assignment/test_core.py
@@ -0,0 +1,123 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+from keystone import exception
+from keystone.tests import unit
+from keystone.tests.unit import default_fixtures
+
+
+class RoleTests(object):
+
+ def test_get_role_returns_not_found(self):
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ uuid.uuid4().hex)
+
+ def test_create_duplicate_role_name_fails(self):
+ role = unit.new_role_ref(id='fake1', name='fake1name')
+ self.role_api.create_role('fake1', role)
+ role['id'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.role_api.create_role,
+ 'fake2',
+ role)
+
+ def test_rename_duplicate_role_name_fails(self):
+ role1 = unit.new_role_ref(id='fake1', name='fake1name')
+ role2 = unit.new_role_ref(id='fake2', name='fake2name')
+ self.role_api.create_role('fake1', role1)
+ self.role_api.create_role('fake2', role2)
+ role1['name'] = 'fake2name'
+ self.assertRaises(exception.Conflict,
+ self.role_api.update_role,
+ 'fake1',
+ role1)
+
+ def test_role_crud(self):
+ role = unit.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+ role_ref = self.role_api.get_role(role['id'])
+ role_ref_dict = {x: role_ref[x] for x in role_ref}
+ self.assertDictEqual(role, role_ref_dict)
+
+ role['name'] = uuid.uuid4().hex
+ updated_role_ref = self.role_api.update_role(role['id'], role)
+ role_ref = self.role_api.get_role(role['id'])
+ role_ref_dict = {x: role_ref[x] for x in role_ref}
+ self.assertDictEqual(role, role_ref_dict)
+ self.assertDictEqual(role_ref_dict, updated_role_ref)
+
+ self.role_api.delete_role(role['id'])
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ role['id'])
+
+ def test_update_role_returns_not_found(self):
+ role = unit.new_role_ref()
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.update_role,
+ role['id'],
+ role)
+
+ def test_list_roles(self):
+ roles = self.role_api.list_roles()
+ self.assertEqual(len(default_fixtures.ROLES), len(roles))
+ role_ids = set(role['id'] for role in roles)
+ expected_role_ids = set(role['id'] for role in default_fixtures.ROLES)
+ self.assertEqual(expected_role_ids, role_ids)
+
+ @unit.skip_if_cache_disabled('role')
+ def test_cache_layer_role_crud(self):
+ role = unit.new_role_ref()
+ role_id = role['id']
+ # Create role
+ self.role_api.create_role(role_id, role)
+ role_ref = self.role_api.get_role(role_id)
+ updated_role_ref = copy.deepcopy(role_ref)
+ updated_role_ref['name'] = uuid.uuid4().hex
+ # Update role, bypassing the role api manager
+ self.role_api.driver.update_role(role_id, updated_role_ref)
+ # Verify get_role still returns old ref
+ self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
+ # Invalidate Cache
+ self.role_api.get_role.invalidate(self.role_api, role_id)
+ # Verify get_role returns the new role_ref
+ self.assertDictEqual(updated_role_ref,
+ self.role_api.get_role(role_id))
+ # Update role back to original via the assignment api manager
+ self.role_api.update_role(role_id, role_ref)
+ # Verify get_role returns the original role ref
+ self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
+ # Delete role bypassing the role api manager
+ self.role_api.driver.delete_role(role_id)
+ # Verify get_role still returns the role_ref
+ self.assertDictEqual(role_ref, self.role_api.get_role(role_id))
+ # Invalidate cache
+ self.role_api.get_role.invalidate(self.role_api, role_id)
+ # Verify RoleNotFound is now raised
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ role_id)
+ # recreate role
+ self.role_api.create_role(role_id, role)
+ self.role_api.get_role(role_id)
+ # delete role via the assignment api manager
+ self.role_api.delete_role(role_id)
+ # verify RoleNotFound is now raised
+ self.assertRaises(exception.RoleNotFound,
+ self.role_api.get_role,
+ role_id)
diff --git a/keystone-moon/keystone/tests/unit/backend/core_ldap.py b/keystone-moon/keystone/tests/unit/backend/core_ldap.py
index 869bb620..8b72c62a 100644
--- a/keystone-moon/keystone/tests/unit/backend/core_ldap.py
+++ b/keystone-moon/keystone/tests/unit/backend/core_ldap.py
@@ -86,6 +86,7 @@ class BaseBackendLdapCommon(object):
class BaseBackendLdap(object):
"""Mixin class to set up an all-LDAP configuration."""
+
def setUp(self):
# NOTE(dstanek): The database must be setup prior to calling the
# parent's setUp. The parent's setUp uses services (like
@@ -113,7 +114,7 @@ class BaseBackendLdapIdentitySqlEverythingElse(unit.SQLDriverOverrides):
super(BaseBackendLdapIdentitySqlEverythingElse, self).setUp()
self.clear_database()
self.load_backends()
- cache.configure_cache_region(cache.REGION)
+ cache.configure_cache()
sqldb.recreate()
self.load_fixtures(default_fixtures)
@@ -137,6 +138,7 @@ class BaseBackendLdapIdentitySqlEverythingElseWithMapping(object):
Setting backward_compatible_ids to False will enable this mapping.
"""
+
def config_overrides(self):
super(BaseBackendLdapIdentitySqlEverythingElseWithMapping,
self).config_overrides()
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py
new file mode 100644
index 00000000..da1490a7
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py
@@ -0,0 +1,39 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.tests.unit import test_backend_sql
+
+
+class SqlIdentityV8(test_backend_sql.SqlIdentity):
+ """Test that a V8 driver still passes the same tests.
+
+ We use the SQL driver as an example of a V8 legacy driver.
+
+ """
+
+ def config_overrides(self):
+ super(SqlIdentityV8, self).config_overrides()
+ # V8 SQL specific driver overrides
+ self.config_fixture.config(
+ group='assignment',
+ driver='keystone.assignment.V8_backends.sql.Assignment')
+ self.use_specific_sql_driver_version(
+ 'keystone.assignment', 'backends', 'V8_')
+
+ def test_delete_project_assignments_same_id_as_domain(self):
+ self.skipTest("V8 doesn't support project acting as a domain.")
+
+ def test_delete_user_assignments_user_same_id_as_group(self):
+ self.skipTest("Groups and users with the same ID are not supported.")
+
+ def test_delete_group_assignments_group_same_id_as_user(self):
+ self.skipTest("Groups and users with the same ID are not supported.")
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py
new file mode 100644
index 00000000..d5469768
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py
@@ -0,0 +1,108 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from six.moves import http_client
+
+from keystone.tests.unit import test_v3_federation
+
+
+class FederatedSetupMixinV8(object):
+ def useV8driver(self):
+ # We use the SQL driver as an example V8 driver, so override
+ # the current driver with that version.
+ self.config_fixture.config(
+ group='federation',
+ driver='keystone.federation.V8_backends.sql.Federation')
+ self.use_specific_sql_driver_version(
+ 'keystone.federation', 'backends', 'V8_')
+
+
+class FederatedIdentityProviderTestsV8(
+ test_v3_federation.FederatedIdentityProviderTests,
+ FederatedSetupMixinV8):
+ """Test that a V8 driver still passes the same tests."""
+
+ def config_overrides(self):
+ super(FederatedIdentityProviderTestsV8, self).config_overrides()
+ self.useV8driver()
+
+ def test_create_idp_remote_repeated(self):
+ """Creates two IdentityProvider entities with some remote_ids
+
+ A remote_id is the same for both so the second IdP is not
+ created because of the uniqueness of the remote_ids
+
+ Expect HTTP 409 Conflict code for the latter call.
+
+ Note: V9 drivers and later augment the conflict message with
+ additional information, which won't be present if we are running
+ a V8 driver - so override the newer tests to just ensure a
+ conflict message is raised.
+ """
+ body = self.default_body.copy()
+ repeated_remote_id = uuid.uuid4().hex
+ body['remote_ids'] = [uuid.uuid4().hex,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex,
+ repeated_remote_id]
+ self._create_default_idp(body=body)
+
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ body['remote_ids'] = [uuid.uuid4().hex,
+ repeated_remote_id]
+ self.put(url, body={'identity_provider': body},
+ expected_status=http_client.CONFLICT)
+
+ def test_check_idp_uniqueness(self):
+ """Add same IdP twice.
+
+ Expect HTTP 409 Conflict code for the latter call.
+
+ Note: V9 drivers and later augment the conflict message with
+ additional information, which won't be present if we are running
+ a V8 driver - so override the newer tests to just ensure a
+ conflict message is raised.
+ """
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ body = self._http_idp_input()
+ self.put(url, body={'identity_provider': body},
+ expected_status=http_client.CREATED)
+ self.put(url, body={'identity_provider': body},
+ expected_status=http_client.CONFLICT)
+
+
+class MappingCRUDTestsV8(
+ test_v3_federation.MappingCRUDTests,
+ FederatedSetupMixinV8):
+ """Test that a V8 driver still passes the same tests."""
+
+ def config_overrides(self):
+ super(MappingCRUDTestsV8, self).config_overrides()
+ self.useV8driver()
+
+
+class ServiceProviderTestsV8(
+ test_v3_federation.ServiceProviderTests,
+ FederatedSetupMixinV8):
+ """Test that a V8 driver still passes the same tests."""
+
+ def config_overrides(self):
+ super(ServiceProviderTestsV8, self).config_overrides()
+ self.useV8driver()
+
+ def test_filter_list_sp_by_id(self):
+ self.skipTest('Operation not supported in v8 and earlier drivers')
+
+ def test_filter_list_sp_by_enabled(self):
+ self.skipTest('Operation not supported in v8 and earlier drivers')
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py
new file mode 100644
index 00000000..16acbdc3
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py
@@ -0,0 +1,71 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import unittest
+
+from keystone.resource.V8_backends import sql
+from keystone.tests import unit
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit.resource import test_backends
+from keystone.tests.unit import test_backend_sql
+
+
+class SqlIdentityV8(test_backend_sql.SqlIdentity):
+ """Test that a V8 driver still passes the same tests.
+
+ We use the SQL driver as an example of a V8 legacy driver.
+
+ """
+
+ def config_overrides(self):
+ super(SqlIdentityV8, self).config_overrides()
+ # V8 SQL specific driver overrides
+ self.config_fixture.config(
+ group='resource',
+ driver='keystone.resource.V8_backends.sql.Resource')
+ self.use_specific_sql_driver_version(
+ 'keystone.resource', 'backends', 'V8_')
+
+ def test_delete_projects_from_ids(self):
+ self.skipTest('Operation not supported in v8 and earlier drivers')
+
+ def test_delete_projects_from_ids_with_no_existing_project_id(self):
+ self.skipTest('Operation not supported in v8 and earlier drivers')
+
+ def test_delete_project_cascade(self):
+ self.skipTest('Operation not supported in v8 and earlier drivers')
+
+ def test_delete_large_project_cascade(self):
+ self.skipTest('Operation not supported in v8 and earlier drivers')
+
+ def test_hidden_project_domain_root_is_really_hidden(self):
+ self.skipTest('Operation not supported in v8 and earlier drivers')
+
+
+class TestSqlResourceDriverV8(unit.BaseTestCase,
+ test_backends.ResourceDriverTests):
+ def setUp(self):
+ super(TestSqlResourceDriverV8, self).setUp()
+
+ version_specifiers = {
+ 'keystone.resource': {
+ 'versionless_backend': 'backends',
+ 'versioned_backend': 'V8_backends'
+ }
+ }
+ self.useFixture(database.Database(version_specifiers))
+
+ self.driver = sql.Resource()
+
+ @unittest.skip('Null domain not allowed.')
+ def test_create_project_null_domain(self):
+ pass
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py
new file mode 100644
index 00000000..d9378c30
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py
@@ -0,0 +1,30 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.tests.unit import test_backend_sql
+
+
+class SqlIdentityV8(test_backend_sql.SqlIdentity):
+ """Test that a V8 driver still passes the same tests.
+
+ We use the SQL driver as an example of a V8 legacy driver.
+
+ """
+
+ def config_overrides(self):
+ super(SqlIdentityV8, self).config_overrides()
+ # V8 SQL specific driver overrides
+ self.config_fixture.config(
+ group='role',
+ driver='keystone.assignment.V8_role_backends.sql.Role')
+ self.use_specific_sql_driver_version(
+ 'keystone.assignment', 'role_backends', 'V8_')
diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/catalog/test_backends.py b/keystone-moon/keystone/tests/unit/catalog/test_backends.py
new file mode 100644
index 00000000..55898015
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/catalog/test_backends.py
@@ -0,0 +1,588 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+import mock
+from six.moves import range
+from testtools import matchers
+
+from keystone.catalog import core
+from keystone.common import driver_hints
+from keystone import exception
+from keystone.tests import unit
+
+
+class CatalogTests(object):
+
+ _legacy_endpoint_id_in_endpoint = True
+ _enabled_default_to_true_when_creating_endpoint = False
+
+ def test_region_crud(self):
+ # create
+ region_id = '0' * 255
+ new_region = unit.new_region_ref(id=region_id)
+ res = self.catalog_api.create_region(new_region)
+
+ # Ensure that we don't need to have a
+ # parent_region_id in the original supplied
+ # ref dict, but that it will be returned from
+ # the endpoint, with None value.
+ expected_region = new_region.copy()
+ expected_region['parent_region_id'] = None
+ self.assertDictEqual(expected_region, res)
+
+ # Test adding another region with the one above
+ # as its parent. We will check below whether deleting
+ # the parent successfully deletes any child regions.
+ parent_region_id = region_id
+ new_region = unit.new_region_ref(parent_region_id=parent_region_id)
+ region_id = new_region['id']
+ res = self.catalog_api.create_region(new_region)
+ self.assertDictEqual(new_region, res)
+
+ # list
+ regions = self.catalog_api.list_regions()
+ self.assertThat(regions, matchers.HasLength(2))
+ region_ids = [x['id'] for x in regions]
+ self.assertIn(parent_region_id, region_ids)
+ self.assertIn(region_id, region_ids)
+
+ # update
+ region_desc_update = {'description': uuid.uuid4().hex}
+ res = self.catalog_api.update_region(region_id, region_desc_update)
+ expected_region = new_region.copy()
+ expected_region['description'] = region_desc_update['description']
+ self.assertDictEqual(expected_region, res)
+
+ # delete
+ self.catalog_api.delete_region(parent_region_id)
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.delete_region,
+ parent_region_id)
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ parent_region_id)
+ # Ensure the child is also gone...
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_id)
+
+ def _create_region_with_parent_id(self, parent_id=None):
+ new_region = unit.new_region_ref(parent_region_id=parent_id)
+ self.catalog_api.create_region(new_region)
+ return new_region
+
+ def test_list_regions_filtered_by_parent_region_id(self):
+ new_region = self._create_region_with_parent_id()
+ parent_id = new_region['id']
+ new_region = self._create_region_with_parent_id(parent_id)
+ new_region = self._create_region_with_parent_id(parent_id)
+
+ # filter by parent_region_id
+ hints = driver_hints.Hints()
+ hints.add_filter('parent_region_id', parent_id)
+ regions = self.catalog_api.list_regions(hints)
+ for region in regions:
+ self.assertEqual(parent_id, region['parent_region_id'])
+
+ @unit.skip_if_cache_disabled('catalog')
+ def test_cache_layer_region_crud(self):
+ new_region = unit.new_region_ref()
+ region_id = new_region['id']
+ self.catalog_api.create_region(new_region.copy())
+ updated_region = copy.deepcopy(new_region)
+ updated_region['description'] = uuid.uuid4().hex
+ # cache the result
+ self.catalog_api.get_region(region_id)
+ # update the region bypassing catalog_api
+ self.catalog_api.driver.update_region(region_id, updated_region)
+ self.assertDictContainsSubset(new_region,
+ self.catalog_api.get_region(region_id))
+ self.catalog_api.get_region.invalidate(self.catalog_api, region_id)
+ self.assertDictContainsSubset(updated_region,
+ self.catalog_api.get_region(region_id))
+ # delete the region
+ self.catalog_api.driver.delete_region(region_id)
+ # still get the old region
+ self.assertDictContainsSubset(updated_region,
+ self.catalog_api.get_region(region_id))
+ self.catalog_api.get_region.invalidate(self.catalog_api, region_id)
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region, region_id)
+
+ @unit.skip_if_cache_disabled('catalog')
+ def test_invalidate_cache_when_updating_region(self):
+ new_region = unit.new_region_ref()
+ region_id = new_region['id']
+ self.catalog_api.create_region(new_region)
+
+ # cache the region
+ self.catalog_api.get_region(region_id)
+
+ # update the region via catalog_api
+ new_description = {'description': uuid.uuid4().hex}
+ self.catalog_api.update_region(region_id, new_description)
+
+ # assert that we can get the new region
+ current_region = self.catalog_api.get_region(region_id)
+ self.assertEqual(new_description['description'],
+ current_region['description'])
+
+ def test_create_region_with_duplicate_id(self):
+ new_region = unit.new_region_ref()
+ self.catalog_api.create_region(new_region)
+ # Create region again with duplicate id
+ self.assertRaises(exception.Conflict,
+ self.catalog_api.create_region,
+ new_region)
+
+ def test_get_region_returns_not_found(self):
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ uuid.uuid4().hex)
+
+ def test_delete_region_returns_not_found(self):
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.delete_region,
+ uuid.uuid4().hex)
+
+ def test_create_region_invalid_parent_region_returns_not_found(self):
+ new_region = unit.new_region_ref(parent_region_id='nonexisting')
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.create_region,
+ new_region)
+
+ def test_avoid_creating_circular_references_in_regions_update(self):
+ region_one = self._create_region_with_parent_id()
+
+ # self circle: region_one->region_one
+ self.assertRaises(exception.CircularRegionHierarchyError,
+ self.catalog_api.update_region,
+ region_one['id'],
+ {'parent_region_id': region_one['id']})
+
+ # region_one->region_two->region_one
+ region_two = self._create_region_with_parent_id(region_one['id'])
+ self.assertRaises(exception.CircularRegionHierarchyError,
+ self.catalog_api.update_region,
+ region_one['id'],
+ {'parent_region_id': region_two['id']})
+
+ # region_one is standalone; region_two->region_three->region_four->region_two
+ region_three = self._create_region_with_parent_id(region_two['id'])
+ region_four = self._create_region_with_parent_id(region_three['id'])
+ self.assertRaises(exception.CircularRegionHierarchyError,
+ self.catalog_api.update_region,
+ region_two['id'],
+ {'parent_region_id': region_four['id']})
+
+ @mock.patch.object(core.CatalogDriverV8,
+ "_ensure_no_circle_in_hierarchical_regions")
+ def test_circular_regions_can_be_deleted(self, mock_ensure_on_circle):
+ # turn off the enforcement so that cycles can be created for the test
+ mock_ensure_on_circle.return_value = None
+
+ region_one = self._create_region_with_parent_id()
+
+ # self circle: region_one->region_one
+ self.catalog_api.update_region(
+ region_one['id'],
+ {'parent_region_id': region_one['id']})
+ self.catalog_api.delete_region(region_one['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_one['id'])
+
+ # region_one->region_two->region_one
+ region_one = self._create_region_with_parent_id()
+ region_two = self._create_region_with_parent_id(region_one['id'])
+ self.catalog_api.update_region(
+ region_one['id'],
+ {'parent_region_id': region_two['id']})
+ self.catalog_api.delete_region(region_one['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_one['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_two['id'])
+
+ # region_one->region_two->region_three->region_one
+ region_one = self._create_region_with_parent_id()
+ region_two = self._create_region_with_parent_id(region_one['id'])
+ region_three = self._create_region_with_parent_id(region_two['id'])
+ self.catalog_api.update_region(
+ region_one['id'],
+ {'parent_region_id': region_three['id']})
+ self.catalog_api.delete_region(region_two['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_two['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_one['id'])
+ self.assertRaises(exception.RegionNotFound,
+ self.catalog_api.get_region,
+ region_three['id'])
+
+ def test_service_crud(self):
+ # create
+ new_service = unit.new_service_ref()
+ service_id = new_service['id']
+ res = self.catalog_api.create_service(service_id, new_service)
+ self.assertDictEqual(new_service, res)
+
+ # list
+ services = self.catalog_api.list_services()
+ self.assertIn(service_id, [x['id'] for x in services])
+
+ # update
+ service_name_update = {'name': uuid.uuid4().hex}
+ res = self.catalog_api.update_service(service_id, service_name_update)
+ expected_service = new_service.copy()
+ expected_service['name'] = service_name_update['name']
+ self.assertDictEqual(expected_service, res)
+
+ # delete
+ self.catalog_api.delete_service(service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.delete_service,
+ service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.get_service,
+ service_id)
+
+ def _create_random_service(self):
+ new_service = unit.new_service_ref()
+ service_id = new_service['id']
+ return self.catalog_api.create_service(service_id, new_service)
+
+ def test_service_filtering(self):
+ target_service = self._create_random_service()
+ unrelated_service1 = self._create_random_service()
+ unrelated_service2 = self._create_random_service()
+
+ # filter by type
+ hint_for_type = driver_hints.Hints()
+ hint_for_type.add_filter(name="type", value=target_service['type'])
+ services = self.catalog_api.list_services(hint_for_type)
+
+ self.assertEqual(1, len(services))
+ filtered_service = services[0]
+ self.assertEqual(target_service['type'], filtered_service['type'])
+ self.assertEqual(target_service['id'], filtered_service['id'])
+
+ # filter should have been removed, since it was already used by the
+ # backend
+ self.assertEqual(0, len(hint_for_type.filters))
+
+ # the backend shouldn't filter by name, since this is handled by the
+ # front end
+ hint_for_name = driver_hints.Hints()
+ hint_for_name.add_filter(name="name", value=target_service['name'])
+ services = self.catalog_api.list_services(hint_for_name)
+
+ self.assertEqual(3, len(services))
+
+ # filter should still be there, since it wasn't used by the backend
+ self.assertEqual(1, len(hint_for_name.filters))
+
+ self.catalog_api.delete_service(target_service['id'])
+ self.catalog_api.delete_service(unrelated_service1['id'])
+ self.catalog_api.delete_service(unrelated_service2['id'])
+
+ @unit.skip_if_cache_disabled('catalog')
+ def test_cache_layer_service_crud(self):
+ new_service = unit.new_service_ref()
+ service_id = new_service['id']
+ res = self.catalog_api.create_service(service_id, new_service)
+ self.assertDictEqual(new_service, res)
+ self.catalog_api.get_service(service_id)
+ updated_service = copy.deepcopy(new_service)
+ updated_service['description'] = uuid.uuid4().hex
+ # update bypassing catalog api
+ self.catalog_api.driver.update_service(service_id, updated_service)
+ self.assertDictContainsSubset(new_service,
+ self.catalog_api.get_service(service_id))
+ self.catalog_api.get_service.invalidate(self.catalog_api, service_id)
+ self.assertDictContainsSubset(updated_service,
+ self.catalog_api.get_service(service_id))
+
+ # delete bypassing catalog api
+ self.catalog_api.driver.delete_service(service_id)
+ self.assertDictContainsSubset(updated_service,
+ self.catalog_api.get_service(service_id))
+ self.catalog_api.get_service.invalidate(self.catalog_api, service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.delete_service,
+ service_id)
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.get_service,
+ service_id)
+
+ @unit.skip_if_cache_disabled('catalog')
+ def test_invalidate_cache_when_updating_service(self):
+ new_service = unit.new_service_ref()
+ service_id = new_service['id']
+ self.catalog_api.create_service(service_id, new_service)
+
+ # cache the service
+ self.catalog_api.get_service(service_id)
+
+ # update the service via catalog api
+ new_type = {'type': uuid.uuid4().hex}
+ self.catalog_api.update_service(service_id, new_type)
+
+ # assert that we can get the new service
+ current_service = self.catalog_api.get_service(service_id)
+ self.assertEqual(new_type['type'], current_service['type'])
+
+ def test_delete_service_with_endpoint(self):
+ # create a service
+ service = unit.new_service_ref()
+ self.catalog_api.create_service(service['id'], service)
+
+ # create an endpoint attached to the service
+ endpoint = unit.new_endpoint_ref(service_id=service['id'],
+ region_id=None)
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ # deleting the service should also delete the endpoint
+ self.catalog_api.delete_service(service['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ endpoint['id'])
+
+ def test_cache_layer_delete_service_with_endpoint(self):
+ service = unit.new_service_ref()
+ self.catalog_api.create_service(service['id'], service)
+
+ # create an endpoint attached to the service
+ endpoint = unit.new_endpoint_ref(service_id=service['id'],
+ region_id=None)
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ # cache the result
+ self.catalog_api.get_service(service['id'])
+ self.catalog_api.get_endpoint(endpoint['id'])
+ # delete the service bypassing catalog api
+ self.catalog_api.driver.delete_service(service['id'])
+ self.assertDictContainsSubset(endpoint,
+ self.catalog_api.
+ get_endpoint(endpoint['id']))
+ self.assertDictContainsSubset(service,
+ self.catalog_api.
+ get_service(service['id']))
+ self.catalog_api.get_endpoint.invalidate(self.catalog_api,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ endpoint['id'])
+ # multiple endpoints associated with a service
+ second_endpoint = unit.new_endpoint_ref(service_id=service['id'],
+ region_id=None)
+ self.catalog_api.create_service(service['id'], service)
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ self.catalog_api.create_endpoint(second_endpoint['id'],
+ second_endpoint)
+ self.catalog_api.delete_service(service['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ second_endpoint['id'])
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ second_endpoint['id'])
+
+ def test_get_service_returns_not_found(self):
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.get_service,
+ uuid.uuid4().hex)
+
+ def test_delete_service_returns_not_found(self):
+ self.assertRaises(exception.ServiceNotFound,
+ self.catalog_api.delete_service,
+ uuid.uuid4().hex)
+
+ def test_create_endpoint_nonexistent_service(self):
+ endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex,
+ region_id=None)
+ self.assertRaises(exception.ValidationError,
+ self.catalog_api.create_endpoint,
+ endpoint['id'],
+ endpoint)
+
+ def test_update_endpoint_nonexistent_service(self):
+ dummy_service, enabled_endpoint, dummy_disabled_endpoint = (
+ self._create_endpoints())
+ new_endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex)
+ self.assertRaises(exception.ValidationError,
+ self.catalog_api.update_endpoint,
+ enabled_endpoint['id'],
+ new_endpoint)
+
+ def test_create_endpoint_nonexistent_region(self):
+ service = unit.new_service_ref()
+ self.catalog_api.create_service(service['id'], service)
+
+ endpoint = unit.new_endpoint_ref(service_id=service['id'])
+ self.assertRaises(exception.ValidationError,
+ self.catalog_api.create_endpoint,
+ endpoint['id'],
+ endpoint)
+
+ def test_update_endpoint_nonexistent_region(self):
+ dummy_service, enabled_endpoint, dummy_disabled_endpoint = (
+ self._create_endpoints())
+ new_endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex)
+ self.assertRaises(exception.ValidationError,
+ self.catalog_api.update_endpoint,
+ enabled_endpoint['id'],
+ new_endpoint)
+
+ def test_get_endpoint_returns_not_found(self):
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.get_endpoint,
+ uuid.uuid4().hex)
+
+ def test_delete_endpoint_returns_not_found(self):
+ self.assertRaises(exception.EndpointNotFound,
+ self.catalog_api.delete_endpoint,
+ uuid.uuid4().hex)
+
+ def test_create_endpoint(self):
+ service = unit.new_service_ref()
+ self.catalog_api.create_service(service['id'], service)
+
+ endpoint = unit.new_endpoint_ref(service_id=service['id'],
+ region_id=None)
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
+
+ def test_update_endpoint(self):
+ dummy_service_ref, endpoint_ref, dummy_disabled_endpoint_ref = (
+ self._create_endpoints())
+ res = self.catalog_api.update_endpoint(endpoint_ref['id'],
+ {'interface': 'private'})
+ expected_endpoint = endpoint_ref.copy()
+ expected_endpoint['enabled'] = True
+ expected_endpoint['interface'] = 'private'
+ if self._legacy_endpoint_id_in_endpoint:
+ expected_endpoint['legacy_endpoint_id'] = None
+ if self._enabled_default_to_true_when_creating_endpoint:
+ expected_endpoint['enabled'] = True
+ self.assertDictEqual(expected_endpoint, res)
+
+ def _create_endpoints(self):
+ # Creates a service and 2 endpoints for the service in the same region.
+ # The 'public' interface is enabled and the 'internal' interface is
+ # disabled.
+
+ def create_endpoint(service_id, region, **kwargs):
+ ref = unit.new_endpoint_ref(
+ service_id=service_id,
+ region_id=region,
+ url='http://localhost/%s' % uuid.uuid4().hex,
+ **kwargs)
+
+ self.catalog_api.create_endpoint(ref['id'], ref)
+ return ref
+
+ # Create a service for use with the endpoints.
+ service_ref = unit.new_service_ref()
+ service_id = service_ref['id']
+ self.catalog_api.create_service(service_id, service_ref)
+
+ region = unit.new_region_ref()
+ self.catalog_api.create_region(region)
+
+ # Create endpoints
+ enabled_endpoint_ref = create_endpoint(service_id, region['id'])
+ disabled_endpoint_ref = create_endpoint(
+ service_id, region['id'], enabled=False, interface='internal')
+
+ return service_ref, enabled_endpoint_ref, disabled_endpoint_ref
+
+ def test_list_endpoints(self):
+ service = unit.new_service_ref()
+ self.catalog_api.create_service(service['id'], service)
+
+ expected_ids = set([uuid.uuid4().hex for _ in range(3)])
+ for endpoint_id in expected_ids:
+ endpoint = unit.new_endpoint_ref(service_id=service['id'],
+ id=endpoint_id,
+ region_id=None)
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ endpoints = self.catalog_api.list_endpoints()
+ self.assertEqual(expected_ids, set(e['id'] for e in endpoints))
+
+ def test_get_catalog_endpoint_disabled(self):
+ """Get back only enabled endpoints when getting the v2 catalog."""
+ service_ref, enabled_endpoint_ref, dummy_disabled_endpoint_ref = (
+ self._create_endpoints())
+
+ user_id = uuid.uuid4().hex
+ project_id = uuid.uuid4().hex
+ catalog = self.catalog_api.get_catalog(user_id, project_id)
+
+ exp_entry = {
+ 'id': enabled_endpoint_ref['id'],
+ 'name': service_ref['name'],
+ 'publicURL': enabled_endpoint_ref['url'],
+ }
+
+ region = enabled_endpoint_ref['region_id']
+ self.assertEqual(exp_entry, catalog[region][service_ref['type']])
+
+ def test_get_v3_catalog_endpoint_disabled(self):
+ """Get back only enabled endpoints when getting the v3 catalog."""
+ enabled_endpoint_ref = self._create_endpoints()[1]
+
+ user_id = uuid.uuid4().hex
+ project_id = uuid.uuid4().hex
+ catalog = self.catalog_api.get_v3_catalog(user_id, project_id)
+
+ endpoint_ids = [x['id'] for x in catalog[0]['endpoints']]
+ self.assertEqual([enabled_endpoint_ref['id']], endpoint_ids)
+
+ @unit.skip_if_cache_disabled('catalog')
+ def test_invalidate_cache_when_updating_endpoint(self):
+ service = unit.new_service_ref()
+ self.catalog_api.create_service(service['id'], service)
+
+ # create an endpoint attached to the service
+ endpoint = unit.new_endpoint_ref(service_id=service['id'],
+ region_id=None)
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+
+ # cache the endpoint
+ self.catalog_api.get_endpoint(endpoint['id'])
+
+ # update the endpoint via catalog api
+ new_url = {'url': uuid.uuid4().hex}
+ self.catalog_api.update_endpoint(endpoint['id'], new_url)
+
+ # assert that we can get the new endpoint
+ current_endpoint = self.catalog_api.get_endpoint(endpoint['id'])
+ self.assertEqual(new_url['url'], current_endpoint['url'])
diff --git a/keystone-moon/keystone/tests/unit/catalog/test_core.py b/keystone-moon/keystone/tests/unit/catalog/test_core.py
index 2f334bb6..b04b0bb7 100644
--- a/keystone-moon/keystone/tests/unit/catalog/test_core.py
+++ b/keystone-moon/keystone/tests/unit/catalog/test_core.py
@@ -10,27 +10,25 @@
# License for the specific language governing permissions and limitations
# under the License.
-from oslo_config import cfg
+import uuid
from keystone.catalog import core
from keystone import exception
from keystone.tests import unit
-CONF = cfg.CONF
-
-
class FormatUrlTests(unit.BaseTestCase):
def test_successful_formatting(self):
url_template = ('http://$(public_bind_host)s:$(admin_port)d/'
- '$(tenant_id)s/$(user_id)s')
+ '$(tenant_id)s/$(user_id)s/$(project_id)s')
+ project_id = uuid.uuid4().hex
values = {'public_bind_host': 'server', 'admin_port': 9090,
- 'tenant_id': 'A', 'user_id': 'B'}
+ 'tenant_id': 'A', 'user_id': 'B', 'project_id': project_id}
actual_url = core.format_url(url_template, values)
- expected_url = 'http://server:9090/A/B'
- self.assertEqual(actual_url, expected_url)
+ expected_url = 'http://server:9090/A/B/%s' % (project_id,)
+ self.assertEqual(expected_url, actual_url)
def test_raises_malformed_on_missing_key(self):
self.assertRaises(exception.MalformedEndpoint,
@@ -73,7 +71,7 @@ class FormatUrlTests(unit.BaseTestCase):
url_template,
values)
- def test_substitution_with_allowed_keyerror(self):
+ def test_substitution_with_allowed_tenant_keyerror(self):
# No value of 'tenant_id' is passed into url_template.
# mod: format_url will return None instead of raising
# "MalformedEndpoint" exception.
@@ -86,3 +84,17 @@ class FormatUrlTests(unit.BaseTestCase):
'user_id': 'B'}
self.assertIsNone(core.format_url(url_template, values,
silent_keyerror_failures=['tenant_id']))
+
+ def test_substitution_with_allowed_project_keyerror(self):
+ # No value of 'project_id' is passed into url_template.
+ # mod: format_url will return None instead of raising
+ # "MalformedEndpoint" exception.
+ # This is intentional behavior since we don't want to skip
+ # all the later endpoints once there is a URL of an endpoint
+ # trying to replace 'project_id' with None.
+ url_template = ('http://$(public_bind_host)s:$(admin_port)d/'
+ '$(project_id)s/$(user_id)s')
+ values = {'public_bind_host': 'server', 'admin_port': 9090,
+ 'user_id': 'B'}
+ self.assertIsNone(core.format_url(url_template, values,
+ silent_keyerror_failures=['project_id']))
diff --git a/keystone-moon/keystone/tests/unit/common/test_authorization.py b/keystone-moon/keystone/tests/unit/common/test_authorization.py
new file mode 100644
index 00000000..73ddbc61
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/common/test_authorization.py
@@ -0,0 +1,161 @@
+# Copyright 2015 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+import copy
+import uuid
+
+from keystone.common import authorization
+from keystone import exception
+from keystone.federation import constants as federation_constants
+from keystone.models import token_model
+from keystone.tests import unit
+from keystone.tests.unit import test_token_provider
+
+
+class TestTokenToAuthContext(unit.BaseTestCase):
+ def test_token_is_project_scoped_with_trust(self):
+ # Check auth_context result when the token is project-scoped and has
+ # trust info.
+
+ # SAMPLE_V3_TOKEN has OS-TRUST:trust in it.
+ token_data = test_token_provider.SAMPLE_V3_TOKEN
+ token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=token_data)
+
+ auth_context = authorization.token_to_auth_context(token)
+
+ self.assertEqual(token, auth_context['token'])
+ self.assertTrue(auth_context['is_delegated_auth'])
+ self.assertEqual(token_data['token']['user']['id'],
+ auth_context['user_id'])
+ self.assertEqual(token_data['token']['user']['domain']['id'],
+ auth_context['user_domain_id'])
+ self.assertEqual(token_data['token']['project']['id'],
+ auth_context['project_id'])
+ self.assertEqual(token_data['token']['project']['domain']['id'],
+ auth_context['project_domain_id'])
+ self.assertNotIn('domain_id', auth_context)
+ self.assertNotIn('domain_name', auth_context)
+ self.assertEqual(token_data['token']['OS-TRUST:trust']['id'],
+ auth_context['trust_id'])
+ self.assertEqual(
+ token_data['token']['OS-TRUST:trust']['trustor_user_id'],
+ auth_context['trustor_id'])
+ self.assertEqual(
+ token_data['token']['OS-TRUST:trust']['trustee_user_id'],
+ auth_context['trustee_id'])
+ self.assertItemsEqual(
+ [r['name'] for r in token_data['token']['roles']],
+ auth_context['roles'])
+ self.assertIsNone(auth_context['consumer_id'])
+ self.assertIsNone(auth_context['access_token_id'])
+ self.assertNotIn('group_ids', auth_context)
+
+ def test_token_is_domain_scoped(self):
+ # Check contents of auth_context when token is domain-scoped.
+ token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
+ del token_data['token']['project']
+
+ domain_id = uuid.uuid4().hex
+ domain_name = uuid.uuid4().hex
+ token_data['token']['domain'] = {'id': domain_id, 'name': domain_name}
+
+ token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=token_data)
+
+ auth_context = authorization.token_to_auth_context(token)
+
+ self.assertNotIn('project_id', auth_context)
+ self.assertNotIn('project_domain_id', auth_context)
+
+ self.assertEqual(domain_id, auth_context['domain_id'])
+ self.assertEqual(domain_name, auth_context['domain_name'])
+
+ def test_token_is_unscoped(self):
+ # Check contents of auth_context when the token is unscoped.
+ token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
+ del token_data['token']['project']
+
+ token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=token_data)
+
+ auth_context = authorization.token_to_auth_context(token)
+
+ self.assertNotIn('project_id', auth_context)
+ self.assertNotIn('project_domain_id', auth_context)
+ self.assertNotIn('domain_id', auth_context)
+ self.assertNotIn('domain_name', auth_context)
+
+ def test_token_is_for_federated_user(self):
+ # When the token is for a federated user then group_ids is in
+ # auth_context.
+ token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
+
+ group_ids = [uuid.uuid4().hex for x in range(1, 5)]
+
+ federation_data = {'identity_provider': {'id': uuid.uuid4().hex},
+ 'protocol': {'id': 'saml2'},
+ 'groups': [{'id': gid} for gid in group_ids]}
+ token_data['token']['user'][federation_constants.FEDERATION] = (
+ federation_data)
+
+ token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=token_data)
+
+ auth_context = authorization.token_to_auth_context(token)
+
+ self.assertItemsEqual(group_ids, auth_context['group_ids'])
+
+ def test_oauth_variables_set_for_oauth_token(self):
+ token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
+ access_token_id = uuid.uuid4().hex
+ consumer_id = uuid.uuid4().hex
+ token_data['token']['OS-OAUTH1'] = {'access_token_id': access_token_id,
+ 'consumer_id': consumer_id}
+ token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=token_data)
+
+ auth_context = authorization.token_to_auth_context(token)
+
+ self.assertEqual(access_token_id, auth_context['access_token_id'])
+ self.assertEqual(consumer_id, auth_context['consumer_id'])
+
+ def test_oauth_variables_not_set(self):
+ token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
+ token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=token_data)
+
+ auth_context = authorization.token_to_auth_context(token)
+
+ self.assertIsNone(auth_context['access_token_id'])
+ self.assertIsNone(auth_context['consumer_id'])
+
+ def test_token_is_not_KeystoneToken_raises_exception(self):
+ # If the token isn't a KeystoneToken then an UnexpectedError exception
+ # is raised.
+ self.assertRaises(exception.UnexpectedError,
+ authorization.token_to_auth_context, {})
+
+ def test_user_id_missing_in_token_raises_exception(self):
+ # If there's no user ID in the token then an Unauthorized
+ # exception is raised.
+ token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN)
+ del token_data['token']['user']['id']
+
+ token = token_model.KeystoneToken(token_id=uuid.uuid4().hex,
+ token_data=token_data)
+
+ self.assertRaises(exception.Unauthorized,
+ authorization.token_to_auth_context, token)
diff --git a/keystone-moon/keystone/tests/unit/common/test_ldap.py b/keystone-moon/keystone/tests/unit/common/test_ldap.py
index e6e2c732..eed77286 100644
--- a/keystone-moon/keystone/tests/unit/common/test_ldap.py
+++ b/keystone-moon/keystone/tests/unit/common/test_ldap.py
@@ -27,6 +27,7 @@ from keystone.common.ldap import core as common_ldap_core
from keystone.tests import unit
from keystone.tests.unit import default_fixtures
from keystone.tests.unit import fakeldap
+from keystone.tests.unit.ksfixtures import database
CONF = cfg.CONF
@@ -195,8 +196,8 @@ class DnCompareTest(unit.BaseTestCase):
def test_startswith_unicode(self):
# dn_startswith accepts unicode.
- child = u'cn=cn=fäké,ou=OpenStäck'
- parent = 'ou=OpenStäck'
+ child = u'cn=fäké,ou=OpenStäck'
+ parent = u'ou=OpenStäck'
self.assertTrue(ks_ldap.dn_startswith(child, parent))
@@ -207,6 +208,8 @@ class LDAPDeleteTreeTest(unit.TestCase):
ks_ldap.register_handler('fake://',
fakeldap.FakeLdapNoSubtreeDelete)
+ self.useFixture(database.Database(self.sql_driver_version_overrides))
+
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -226,11 +229,11 @@ class LDAPDeleteTreeTest(unit.TestCase):
config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
return config_files
- def test_deleteTree(self):
+ def test_delete_tree(self):
"""Test manually deleting a tree.
Few LDAP servers support CONTROL_DELETETREE. This test
- exercises the alternate code paths in BaseLdap.deleteTree.
+ exercises the alternate code paths in BaseLdap.delete_tree.
"""
conn = self.identity_api.user.get_connection()
@@ -251,7 +254,7 @@ class LDAPDeleteTreeTest(unit.TestCase):
# cn=base
# cn=child,cn=base
# cn=grandchild,cn=child,cn=base
- # then attempt to deleteTree(cn=base)
+ # then attempt to delete_tree(cn=base)
base_id = 'base'
base_dn = create_entry(base_id)
child_dn = create_entry('child', base_dn)
@@ -273,8 +276,8 @@ class LDAPDeleteTreeTest(unit.TestCase):
self.assertRaises(ldap.NOT_ALLOWED_ON_NONLEAF,
conn.delete_s, child_dn)
- # call our deleteTree implementation
- self.identity_api.user.deleteTree(base_id)
+ # call our delete_tree implementation
+ self.identity_api.user.delete_tree(base_id)
self.assertRaises(ldap.NO_SUCH_OBJECT,
conn.search_s, base_dn, ldap.SCOPE_BASE)
self.assertRaises(ldap.NO_SUCH_OBJECT,
@@ -283,6 +286,24 @@ class LDAPDeleteTreeTest(unit.TestCase):
conn.search_s, grandchild_dn, ldap.SCOPE_BASE)
+class MultiURLTests(unit.TestCase):
+ """Tests for setting multiple LDAP URLs."""
+
+ def test_multiple_urls_with_comma_no_conn_pool(self):
+ urls = 'ldap://localhost,ldap://backup.localhost'
+ self.config_fixture.config(group='ldap', url=urls, use_pool=False)
+ base_ldap = ks_ldap.BaseLdap(CONF)
+ ldap_connection = base_ldap.get_connection()
+ self.assertEqual(urls, ldap_connection.conn.conn._uri)
+
+ def test_multiple_urls_with_comma_with_conn_pool(self):
+ urls = 'ldap://localhost,ldap://backup.localhost'
+ self.config_fixture.config(group='ldap', url=urls, use_pool=True)
+ base_ldap = ks_ldap.BaseLdap(CONF)
+ ldap_connection = base_ldap.get_connection()
+ self.assertEqual(urls, ldap_connection.conn.conn_pool.uri)
+
+
class SslTlsTest(unit.TestCase):
"""Tests for the SSL/TLS functionality in keystone.common.ldap.core."""
@@ -359,6 +380,7 @@ class LDAPPagedResultsTest(unit.TestCase):
ks_ldap.register_handler('fake://', fakeldap.FakeLdap)
self.addCleanup(common_ldap_core._HANDLERS.clear)
+ self.useFixture(database.Database(self.sql_driver_version_overrides))
self.load_backends()
self.load_fixtures(default_fixtures)
diff --git a/keystone-moon/keystone/tests/unit/common/test_manager.py b/keystone-moon/keystone/tests/unit/common/test_manager.py
index 1bc19763..7ef91e15 100644
--- a/keystone-moon/keystone/tests/unit/common/test_manager.py
+++ b/keystone-moon/keystone/tests/unit/common/test_manager.py
@@ -24,7 +24,7 @@ class TestCreateLegacyDriver(unit.BaseTestCase):
Driver = manager.create_legacy_driver(catalog.CatalogDriverV8)
# NOTE(dstanek): I want to subvert the requirement for this
- # class to implement all of the abstractmethods.
+ # class to implement all of the abstract methods.
Driver.__abstractmethods__ = set()
impl = Driver()
@@ -32,8 +32,9 @@ class TestCreateLegacyDriver(unit.BaseTestCase):
'as_of': 'Liberty',
'what': 'keystone.catalog.core.Driver',
'in_favor_of': 'keystone.catalog.core.CatalogDriverV8',
- 'remove_in': 'N',
+ 'remove_in': mock.ANY,
}
mock_reporter.assert_called_with(mock.ANY, mock.ANY, details)
+ self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0])
self.assertIsInstance(impl, catalog.CatalogDriverV8)
diff --git a/keystone-moon/keystone/tests/unit/common/test_notifications.py b/keystone-moon/keystone/tests/unit/common/test_notifications.py
index 1ad8d50d..aa2e6f72 100644
--- a/keystone-moon/keystone/tests/unit/common/test_notifications.py
+++ b/keystone-moon/keystone/tests/unit/common/test_notifications.py
@@ -43,9 +43,7 @@ class ArbitraryException(Exception):
def register_callback(operation, resource_type=EXP_RESOURCE_TYPE):
- """Helper for creating and registering a mock callback.
-
- """
+ """Helper for creating and registering a mock callback."""
callback = mock.Mock(__name__='callback',
im_class=mock.Mock(__name__='class'))
notifications.register_event_callback(operation, resource_type, callback)
@@ -95,89 +93,14 @@ class AuditNotificationsTestCase(unit.BaseTestCase):
DISABLED_OPERATION)
-class NotificationsWrapperTestCase(unit.BaseTestCase):
- def create_fake_ref(self):
- resource_id = uuid.uuid4().hex
- return resource_id, {
- 'id': resource_id,
- 'key': uuid.uuid4().hex
- }
-
- @notifications.created(EXP_RESOURCE_TYPE)
- def create_resource(self, resource_id, data):
- return data
-
- def test_resource_created_notification(self):
- exp_resource_id, data = self.create_fake_ref()
- callback = register_callback(CREATED_OPERATION)
-
- self.create_resource(exp_resource_id, data)
- callback.assert_called_with('identity', EXP_RESOURCE_TYPE,
- CREATED_OPERATION,
- {'resource_info': exp_resource_id})
-
- @notifications.updated(EXP_RESOURCE_TYPE)
- def update_resource(self, resource_id, data):
- return data
-
- def test_resource_updated_notification(self):
- exp_resource_id, data = self.create_fake_ref()
- callback = register_callback(UPDATED_OPERATION)
-
- self.update_resource(exp_resource_id, data)
- callback.assert_called_with('identity', EXP_RESOURCE_TYPE,
- UPDATED_OPERATION,
- {'resource_info': exp_resource_id})
-
- @notifications.deleted(EXP_RESOURCE_TYPE)
- def delete_resource(self, resource_id):
- pass
-
- def test_resource_deleted_notification(self):
- exp_resource_id = uuid.uuid4().hex
- callback = register_callback(DELETED_OPERATION)
-
- self.delete_resource(exp_resource_id)
- callback.assert_called_with('identity', EXP_RESOURCE_TYPE,
- DELETED_OPERATION,
- {'resource_info': exp_resource_id})
-
- @notifications.created(EXP_RESOURCE_TYPE)
- def create_exception(self, resource_id):
- raise ArbitraryException()
-
- def test_create_exception_without_notification(self):
- callback = register_callback(CREATED_OPERATION)
- self.assertRaises(
- ArbitraryException, self.create_exception, uuid.uuid4().hex)
- self.assertFalse(callback.called)
-
- @notifications.created(EXP_RESOURCE_TYPE)
- def update_exception(self, resource_id):
- raise ArbitraryException()
-
- def test_update_exception_without_notification(self):
- callback = register_callback(UPDATED_OPERATION)
- self.assertRaises(
- ArbitraryException, self.update_exception, uuid.uuid4().hex)
- self.assertFalse(callback.called)
-
- @notifications.deleted(EXP_RESOURCE_TYPE)
- def delete_exception(self, resource_id):
- raise ArbitraryException()
-
- def test_delete_exception_without_notification(self):
- callback = register_callback(DELETED_OPERATION)
- self.assertRaises(
- ArbitraryException, self.delete_exception, uuid.uuid4().hex)
- self.assertFalse(callback.called)
-
-
class NotificationsTestCase(unit.BaseTestCase):
def test_send_notification(self):
- """Test the private method _send_notification to ensure event_type,
- payload, and context are built and passed properly.
+ """Test _send_notification.
+
+ Test the private method _send_notification to ensure event_type,
+ payload, and context are built and passed properly.
+
"""
resource = uuid.uuid4().hex
resource_type = EXP_RESOURCE_TYPE
@@ -203,6 +126,82 @@ class NotificationsTestCase(unit.BaseTestCase):
resource)
mocked.assert_called_once_with(*expected_args)
+ def test_send_notification_with_opt_out(self):
+ """Test the private method _send_notification with opt-out.
+
+ Test that _send_notification does not notify when a valid
+ notification_opt_out configuration is provided.
+ """
+ resource = uuid.uuid4().hex
+ resource_type = EXP_RESOURCE_TYPE
+ operation = CREATED_OPERATION
+ event_type = 'identity.%s.created' % resource_type
+
+ # NOTE(diazjf): Here we add notification_opt_out to the
+ # configuration so that we should return before _get_notifier is
+ # called. This is because we are opting out notifications for the
+ # passed resource_type and operation.
+ conf = self.useFixture(config_fixture.Config(CONF))
+ conf.config(notification_opt_out=event_type)
+
+ with mock.patch.object(notifications._get_notifier(),
+ '_notify') as mocked:
+
+ notifications._send_notification(operation, resource_type,
+ resource)
+ mocked.assert_not_called()
+
+ def test_send_audit_notification_with_opt_out(self):
+ """Test the private method _send_audit_notification with opt-out.
+
+ Test that _send_audit_notification does not notify when a valid
+ notification_opt_out configuration is provided.
+ """
+ resource_type = EXP_RESOURCE_TYPE
+
+ action = CREATED_OPERATION + '.' + resource_type
+ initiator = mock
+ target = mock
+ outcome = 'success'
+ event_type = 'identity.%s.created' % resource_type
+
+ conf = self.useFixture(config_fixture.Config(CONF))
+ conf.config(notification_opt_out=event_type)
+
+ with mock.patch.object(notifications._get_notifier(),
+ '_notify') as mocked:
+
+ notifications._send_audit_notification(action,
+ initiator,
+ outcome,
+ target,
+ event_type)
+ mocked.assert_not_called()
+
+ def test_opt_out_authenticate_event(self):
+ """Test that authenticate events are successfully opted out."""
+ resource_type = EXP_RESOURCE_TYPE
+
+ action = CREATED_OPERATION + '.' + resource_type
+ initiator = mock
+ target = mock
+ outcome = 'success'
+ event_type = 'identity.authenticate'
+ meter_name = '%s.%s' % (event_type, outcome)
+
+ conf = self.useFixture(config_fixture.Config(CONF))
+ conf.config(notification_opt_out=meter_name)
+
+ with mock.patch.object(notifications._get_notifier(),
+ '_notify') as mocked:
+
+ notifications._send_audit_notification(action,
+ initiator,
+ outcome,
+ target,
+ event_type)
+ mocked.assert_not_called()
+
class BaseNotificationTest(test_v3.RestfulTestCase):
@@ -213,13 +212,17 @@ class BaseNotificationTest(test_v3.RestfulTestCase):
self._audits = []
def fake_notify(operation, resource_type, resource_id,
- public=True):
+ actor_dict=None, public=True):
note = {
'resource_id': resource_id,
'operation': operation,
'resource_type': resource_type,
'send_notification_called': True,
'public': public}
+ if actor_dict:
+ note['actor_id'] = actor_dict.get('id')
+ note['actor_type'] = actor_dict.get('type')
+ note['actor_operation'] = actor_dict.get('actor_operation')
self._notifications.append(note)
self.useFixture(mockpatch.PatchObject(
@@ -249,17 +252,23 @@ class BaseNotificationTest(test_v3.RestfulTestCase):
self.useFixture(mockpatch.PatchObject(
notifications, '_send_audit_notification', fake_audit))
- def _assert_last_note(self, resource_id, operation, resource_type):
+ def _assert_last_note(self, resource_id, operation, resource_type,
+ actor_id=None, actor_type=None,
+ actor_operation=None):
# NOTE(stevemar): If 'basic' format is not used, then simply
# return since this assertion is not valid.
if CONF.notification_format != 'basic':
return
self.assertTrue(len(self._notifications) > 0)
note = self._notifications[-1]
- self.assertEqual(note['operation'], operation)
- self.assertEqual(note['resource_id'], resource_id)
- self.assertEqual(note['resource_type'], resource_type)
+ self.assertEqual(operation, note['operation'])
+ self.assertEqual(resource_id, note['resource_id'])
+ self.assertEqual(resource_type, note['resource_type'])
self.assertTrue(note['send_notification_called'])
+ if actor_id:
+ self.assertEqual(actor_id, note['actor_id'])
+ self.assertEqual(actor_type, note['actor_type'])
+ self.assertEqual(actor_operation, note['actor_operation'])
def _assert_last_audit(self, resource_id, operation, resource_type,
target_uri):
@@ -318,14 +327,14 @@ class BaseNotificationTest(test_v3.RestfulTestCase):
class NotificationsForEntities(BaseNotificationTest):
def test_create_group(self):
- group_ref = self.new_group_ref(domain_id=self.domain_id)
+ group_ref = unit.new_group_ref(domain_id=self.domain_id)
group_ref = self.identity_api.create_group(group_ref)
self._assert_last_note(group_ref['id'], CREATED_OPERATION, 'group')
self._assert_last_audit(group_ref['id'], CREATED_OPERATION, 'group',
cadftaxonomy.SECURITY_GROUP)
def test_create_project(self):
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self._assert_last_note(
project_ref['id'], CREATED_OPERATION, 'project')
@@ -333,27 +342,27 @@ class NotificationsForEntities(BaseNotificationTest):
'project', cadftaxonomy.SECURITY_PROJECT)
def test_create_role(self):
- role_ref = self.new_role_ref()
+ role_ref = unit.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role')
self._assert_last_audit(role_ref['id'], CREATED_OPERATION, 'role',
cadftaxonomy.SECURITY_ROLE)
def test_create_user(self):
- user_ref = self.new_user_ref(domain_id=self.domain_id)
+ user_ref = unit.new_user_ref(domain_id=self.domain_id)
user_ref = self.identity_api.create_user(user_ref)
self._assert_last_note(user_ref['id'], CREATED_OPERATION, 'user')
self._assert_last_audit(user_ref['id'], CREATED_OPERATION, 'user',
cadftaxonomy.SECURITY_ACCOUNT_USER)
def test_create_trust(self):
- trustor = self.new_user_ref(domain_id=self.domain_id)
+ trustor = unit.new_user_ref(domain_id=self.domain_id)
trustor = self.identity_api.create_user(trustor)
- trustee = self.new_user_ref(domain_id=self.domain_id)
+ trustee = unit.new_user_ref(domain_id=self.domain_id)
trustee = self.identity_api.create_user(trustee)
- role_ref = self.new_role_ref()
+ role_ref = unit.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
- trust_ref = self.new_trust_ref(trustor['id'],
+ trust_ref = unit.new_trust_ref(trustor['id'],
trustee['id'])
self.trust_api.create_trust(trust_ref['id'],
trust_ref,
@@ -364,7 +373,7 @@ class NotificationsForEntities(BaseNotificationTest):
'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
def test_delete_group(self):
- group_ref = self.new_group_ref(domain_id=self.domain_id)
+ group_ref = unit.new_group_ref(domain_id=self.domain_id)
group_ref = self.identity_api.create_group(group_ref)
self.identity_api.delete_group(group_ref['id'])
self._assert_last_note(group_ref['id'], DELETED_OPERATION, 'group')
@@ -372,7 +381,7 @@ class NotificationsForEntities(BaseNotificationTest):
cadftaxonomy.SECURITY_GROUP)
def test_delete_project(self):
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.resource_api.delete_project(project_ref['id'])
self._assert_last_note(
@@ -381,7 +390,7 @@ class NotificationsForEntities(BaseNotificationTest):
'project', cadftaxonomy.SECURITY_PROJECT)
def test_delete_role(self):
- role_ref = self.new_role_ref()
+ role_ref = unit.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
self.role_api.delete_role(role_ref['id'])
self._assert_last_note(role_ref['id'], DELETED_OPERATION, 'role')
@@ -389,7 +398,7 @@ class NotificationsForEntities(BaseNotificationTest):
cadftaxonomy.SECURITY_ROLE)
def test_delete_user(self):
- user_ref = self.new_user_ref(domain_id=self.domain_id)
+ user_ref = unit.new_user_ref(domain_id=self.domain_id)
user_ref = self.identity_api.create_user(user_ref)
self.identity_api.delete_user(user_ref['id'])
self._assert_last_note(user_ref['id'], DELETED_OPERATION, 'user')
@@ -397,14 +406,14 @@ class NotificationsForEntities(BaseNotificationTest):
cadftaxonomy.SECURITY_ACCOUNT_USER)
def test_create_domain(self):
- domain_ref = self.new_domain_ref()
+ domain_ref = unit.new_domain_ref()
self.resource_api.create_domain(domain_ref['id'], domain_ref)
self._assert_last_note(domain_ref['id'], CREATED_OPERATION, 'domain')
self._assert_last_audit(domain_ref['id'], CREATED_OPERATION, 'domain',
cadftaxonomy.SECURITY_DOMAIN)
def test_update_domain(self):
- domain_ref = self.new_domain_ref()
+ domain_ref = unit.new_domain_ref()
self.resource_api.create_domain(domain_ref['id'], domain_ref)
domain_ref['description'] = uuid.uuid4().hex
self.resource_api.update_domain(domain_ref['id'], domain_ref)
@@ -413,7 +422,7 @@ class NotificationsForEntities(BaseNotificationTest):
cadftaxonomy.SECURITY_DOMAIN)
def test_delete_domain(self):
- domain_ref = self.new_domain_ref()
+ domain_ref = unit.new_domain_ref()
self.resource_api.create_domain(domain_ref['id'], domain_ref)
domain_ref['enabled'] = False
self.resource_api.update_domain(domain_ref['id'], domain_ref)
@@ -423,12 +432,12 @@ class NotificationsForEntities(BaseNotificationTest):
cadftaxonomy.SECURITY_DOMAIN)
def test_delete_trust(self):
- trustor = self.new_user_ref(domain_id=self.domain_id)
+ trustor = unit.new_user_ref(domain_id=self.domain_id)
trustor = self.identity_api.create_user(trustor)
- trustee = self.new_user_ref(domain_id=self.domain_id)
+ trustee = unit.new_user_ref(domain_id=self.domain_id)
trustee = self.identity_api.create_user(trustee)
- role_ref = self.new_role_ref()
- trust_ref = self.new_trust_ref(trustor['id'], trustee['id'])
+ role_ref = unit.new_role_ref()
+ trust_ref = unit.new_trust_ref(trustor['id'], trustee['id'])
self.trust_api.create_trust(trust_ref['id'],
trust_ref,
[role_ref])
@@ -439,7 +448,9 @@ class NotificationsForEntities(BaseNotificationTest):
'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST)
def test_create_endpoint(self):
- endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
+ endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id)
self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
self._assert_notify_sent(endpoint_ref['id'], CREATED_OPERATION,
'endpoint')
@@ -447,7 +458,9 @@ class NotificationsForEntities(BaseNotificationTest):
'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
def test_update_endpoint(self):
- endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
+ endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id)
self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
self.catalog_api.update_endpoint(endpoint_ref['id'], endpoint_ref)
self._assert_notify_sent(endpoint_ref['id'], UPDATED_OPERATION,
@@ -456,7 +469,9 @@ class NotificationsForEntities(BaseNotificationTest):
'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
def test_delete_endpoint(self):
- endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
+ endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id)
self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref)
self.catalog_api.delete_endpoint(endpoint_ref['id'])
self._assert_notify_sent(endpoint_ref['id'], DELETED_OPERATION,
@@ -465,7 +480,7 @@ class NotificationsForEntities(BaseNotificationTest):
'endpoint', cadftaxonomy.SECURITY_ENDPOINT)
def test_create_service(self):
- service_ref = self.new_service_ref()
+ service_ref = unit.new_service_ref()
self.catalog_api.create_service(service_ref['id'], service_ref)
self._assert_notify_sent(service_ref['id'], CREATED_OPERATION,
'service')
@@ -473,7 +488,7 @@ class NotificationsForEntities(BaseNotificationTest):
'service', cadftaxonomy.SECURITY_SERVICE)
def test_update_service(self):
- service_ref = self.new_service_ref()
+ service_ref = unit.new_service_ref()
self.catalog_api.create_service(service_ref['id'], service_ref)
self.catalog_api.update_service(service_ref['id'], service_ref)
self._assert_notify_sent(service_ref['id'], UPDATED_OPERATION,
@@ -482,7 +497,7 @@ class NotificationsForEntities(BaseNotificationTest):
'service', cadftaxonomy.SECURITY_SERVICE)
def test_delete_service(self):
- service_ref = self.new_service_ref()
+ service_ref = unit.new_service_ref()
self.catalog_api.create_service(service_ref['id'], service_ref)
self.catalog_api.delete_service(service_ref['id'])
self._assert_notify_sent(service_ref['id'], DELETED_OPERATION,
@@ -491,7 +506,7 @@ class NotificationsForEntities(BaseNotificationTest):
'service', cadftaxonomy.SECURITY_SERVICE)
def test_create_region(self):
- region_ref = self.new_region_ref()
+ region_ref = unit.new_region_ref()
self.catalog_api.create_region(region_ref)
self._assert_notify_sent(region_ref['id'], CREATED_OPERATION,
'region')
@@ -499,7 +514,7 @@ class NotificationsForEntities(BaseNotificationTest):
'region', cadftaxonomy.SECURITY_REGION)
def test_update_region(self):
- region_ref = self.new_region_ref()
+ region_ref = unit.new_region_ref()
self.catalog_api.create_region(region_ref)
self.catalog_api.update_region(region_ref['id'], region_ref)
self._assert_notify_sent(region_ref['id'], UPDATED_OPERATION,
@@ -508,7 +523,7 @@ class NotificationsForEntities(BaseNotificationTest):
'region', cadftaxonomy.SECURITY_REGION)
def test_delete_region(self):
- region_ref = self.new_region_ref()
+ region_ref = unit.new_region_ref()
self.catalog_api.create_region(region_ref)
self.catalog_api.delete_region(region_ref['id'])
self._assert_notify_sent(region_ref['id'], DELETED_OPERATION,
@@ -517,7 +532,7 @@ class NotificationsForEntities(BaseNotificationTest):
'region', cadftaxonomy.SECURITY_REGION)
def test_create_policy(self):
- policy_ref = self.new_policy_ref()
+ policy_ref = unit.new_policy_ref()
self.policy_api.create_policy(policy_ref['id'], policy_ref)
self._assert_notify_sent(policy_ref['id'], CREATED_OPERATION,
'policy')
@@ -525,7 +540,7 @@ class NotificationsForEntities(BaseNotificationTest):
'policy', cadftaxonomy.SECURITY_POLICY)
def test_update_policy(self):
- policy_ref = self.new_policy_ref()
+ policy_ref = unit.new_policy_ref()
self.policy_api.create_policy(policy_ref['id'], policy_ref)
self.policy_api.update_policy(policy_ref['id'], policy_ref)
self._assert_notify_sent(policy_ref['id'], UPDATED_OPERATION,
@@ -534,7 +549,7 @@ class NotificationsForEntities(BaseNotificationTest):
'policy', cadftaxonomy.SECURITY_POLICY)
def test_delete_policy(self):
- policy_ref = self.new_policy_ref()
+ policy_ref = unit.new_policy_ref()
self.policy_api.create_policy(policy_ref['id'], policy_ref)
self.policy_api.delete_policy(policy_ref['id'])
self._assert_notify_sent(policy_ref['id'], DELETED_OPERATION,
@@ -543,7 +558,7 @@ class NotificationsForEntities(BaseNotificationTest):
'policy', cadftaxonomy.SECURITY_POLICY)
def test_disable_domain(self):
- domain_ref = self.new_domain_ref()
+ domain_ref = unit.new_domain_ref()
self.resource_api.create_domain(domain_ref['id'], domain_ref)
domain_ref['enabled'] = False
self.resource_api.update_domain(domain_ref['id'], domain_ref)
@@ -551,8 +566,7 @@ class NotificationsForEntities(BaseNotificationTest):
public=False)
def test_disable_of_disabled_domain_does_not_notify(self):
- domain_ref = self.new_domain_ref()
- domain_ref['enabled'] = False
+ domain_ref = unit.new_domain_ref(enabled=False)
self.resource_api.create_domain(domain_ref['id'], domain_ref)
# The domain_ref above is not changed during the create process. We
# can use the same ref to perform the update.
@@ -561,7 +575,7 @@ class NotificationsForEntities(BaseNotificationTest):
public=False)
def test_update_group(self):
- group_ref = self.new_group_ref(domain_id=self.domain_id)
+ group_ref = unit.new_group_ref(domain_id=self.domain_id)
group_ref = self.identity_api.create_group(group_ref)
self.identity_api.update_group(group_ref['id'], group_ref)
self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group')
@@ -569,7 +583,7 @@ class NotificationsForEntities(BaseNotificationTest):
cadftaxonomy.SECURITY_GROUP)
def test_update_project(self):
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.resource_api.update_project(project_ref['id'], project_ref)
self._assert_notify_sent(
@@ -578,7 +592,7 @@ class NotificationsForEntities(BaseNotificationTest):
'project', cadftaxonomy.SECURITY_PROJECT)
def test_disable_project(self):
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
project_ref['enabled'] = False
self.resource_api.update_project(project_ref['id'], project_ref)
@@ -586,8 +600,8 @@ class NotificationsForEntities(BaseNotificationTest):
public=False)
def test_disable_of_disabled_project_does_not_notify(self):
- project_ref = self.new_project_ref(domain_id=self.domain_id)
- project_ref['enabled'] = False
+ project_ref = unit.new_project_ref(domain_id=self.domain_id,
+ enabled=False)
self.resource_api.create_project(project_ref['id'], project_ref)
# The project_ref above is not changed during the create process. We
# can use the same ref to perform the update.
@@ -596,7 +610,7 @@ class NotificationsForEntities(BaseNotificationTest):
public=False)
def test_update_project_does_not_send_disable(self):
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
project_ref['enabled'] = True
self.resource_api.update_project(project_ref['id'], project_ref)
@@ -605,7 +619,7 @@ class NotificationsForEntities(BaseNotificationTest):
self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project')
def test_update_role(self):
- role_ref = self.new_role_ref()
+ role_ref = unit.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
self.role_api.update_role(role_ref['id'], role_ref)
self._assert_last_note(role_ref['id'], UPDATED_OPERATION, 'role')
@@ -613,7 +627,7 @@ class NotificationsForEntities(BaseNotificationTest):
cadftaxonomy.SECURITY_ROLE)
def test_update_user(self):
- user_ref = self.new_user_ref(domain_id=self.domain_id)
+ user_ref = unit.new_user_ref(domain_id=self.domain_id)
user_ref = self.identity_api.create_user(user_ref)
self.identity_api.update_user(user_ref['id'], user_ref)
self._assert_last_note(user_ref['id'], UPDATED_OPERATION, 'user')
@@ -622,7 +636,7 @@ class NotificationsForEntities(BaseNotificationTest):
def test_config_option_no_events(self):
self.config_fixture.config(notification_format='basic')
- role_ref = self.new_role_ref()
+ role_ref = unit.new_role_ref()
self.role_api.create_role(role_ref['id'], role_ref)
# The regular notifications will still be emitted, since they are
# used for callback handling.
@@ -630,6 +644,28 @@ class NotificationsForEntities(BaseNotificationTest):
# No audit event should have occurred
self.assertEqual(0, len(self._audits))
+ def test_add_user_to_group(self):
+ user_ref = unit.new_user_ref(domain_id=self.domain_id)
+ user_ref = self.identity_api.create_user(user_ref)
+ group_ref = unit.new_group_ref(domain_id=self.domain_id)
+ group_ref = self.identity_api.create_group(group_ref)
+ self.identity_api.add_user_to_group(user_ref['id'], group_ref['id'])
+ self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group',
+ actor_id=user_ref['id'], actor_type='user',
+ actor_operation='added')
+
+ def test_remove_user_from_group(self):
+ user_ref = unit.new_user_ref(domain_id=self.domain_id)
+ user_ref = self.identity_api.create_user(user_ref)
+ group_ref = unit.new_group_ref(domain_id=self.domain_id)
+ group_ref = self.identity_api.create_group(group_ref)
+ self.identity_api.add_user_to_group(user_ref['id'], group_ref['id'])
+ self.identity_api.remove_user_from_group(user_ref['id'],
+ group_ref['id'])
+ self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group',
+ actor_id=user_ref['id'], actor_type='user',
+ actor_operation='removed')
+
class CADFNotificationsForEntities(NotificationsForEntities):
@@ -638,7 +674,7 @@ class CADFNotificationsForEntities(NotificationsForEntities):
self.config_fixture.config(notification_format='cadf')
def test_initiator_data_is_set(self):
- ref = self.new_domain_ref()
+ ref = unit.new_domain_ref()
resp = self.post('/domains', body={'domain': ref})
resource_id = resp.result.get('domain').get('id')
self._assert_last_audit(resource_id, CREATED_OPERATION, 'domain',
@@ -809,7 +845,7 @@ class TestEventCallbacks(test_v3.RestfulTestCase):
def test_notification_received(self):
callback = register_callback(CREATED_OPERATION, 'project')
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.assertTrue(callback.called)
@@ -854,7 +890,7 @@ class TestEventCallbacks(test_v3.RestfulTestCase):
callback_called.append(True)
Foo()
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.assertEqual([True], callback_called)
@@ -877,7 +913,7 @@ class TestEventCallbacks(test_v3.RestfulTestCase):
callback_called.append('cb1')
Foo()
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project_ref['id'], project_ref)
self.assertItemsEqual(['cb1', 'cb0'], callback_called)
@@ -919,7 +955,7 @@ class TestEventCallbacks(test_v3.RestfulTestCase):
# something like:
# self.assertRaises(TypeError, Foo)
Foo()
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
self.assertRaises(TypeError, self.resource_api.create_project,
project_ref['id'], project_ref)
@@ -963,13 +999,13 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
def _assert_last_note(self, action, user_id, event_type=None):
self.assertTrue(self._notifications)
note = self._notifications[-1]
- self.assertEqual(note['action'], action)
+ self.assertEqual(action, note['action'])
initiator = note['initiator']
- self.assertEqual(initiator.id, user_id)
- self.assertEqual(initiator.host.address, self.LOCAL_HOST)
+ self.assertEqual(user_id, initiator.id)
+ self.assertEqual(self.LOCAL_HOST, initiator.host.address)
self.assertTrue(note['send_notification_called'])
if event_type:
- self.assertEqual(note['event_type'], event_type)
+ self.assertEqual(event_type, note['event_type'])
def _assert_event(self, role_id, project=None, domain=None,
user=None, group=None, inherit=False):
@@ -1006,7 +1042,6 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
'id': 'openstack:782689dd-f428-4f13-99c7-5c70f94a5ac1'
}
"""
-
note = self._notifications[-1]
event = note['event']
if project:
@@ -1073,7 +1108,7 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
user=self.user_id)
def test_group_domain_grant(self):
- group_ref = self.new_group_ref(domain_id=self.domain_id)
+ group_ref = unit.new_group_ref(domain_id=self.domain_id)
group = self.identity_api.create_group(group_ref)
self.identity_api.add_user_to_group(self.user_id, group['id'])
url = ('/domains/%s/groups/%s/roles/%s' %
@@ -1087,7 +1122,7 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
# A notification is sent when add_role_to_user_and_project is called on
# the assignment manager.
- project_ref = self.new_project_ref(self.domain_id)
+ project_ref = unit.new_project_ref(self.domain_id)
project = self.resource_api.create_project(
project_ref['id'], project_ref)
tenant_id = project['id']
@@ -1097,7 +1132,7 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
self.assertTrue(self._notifications)
note = self._notifications[-1]
- self.assertEqual(note['action'], 'created.role_assignment')
+ self.assertEqual('created.role_assignment', note['action'])
self.assertTrue(note['send_notification_called'])
self._assert_event(self.role_id, project=tenant_id, user=self.user_id)
@@ -1111,7 +1146,7 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase):
self.assertTrue(self._notifications)
note = self._notifications[-1]
- self.assertEqual(note['action'], 'deleted.role_assignment')
+ self.assertEqual('deleted.role_assignment', note['action'])
self.assertTrue(note['send_notification_called'])
self._assert_event(self.role_id, project=self.project_id,
@@ -1126,7 +1161,9 @@ class TestCallbackRegistration(unit.BaseTestCase):
self.mock_log.logger.getEffectiveLevel.return_value = logging.DEBUG
def verify_log_message(self, data):
- """Tests that use this are a little brittle because adding more
+ """Verify log message.
+
+ Tests that use this are a little brittle because adding more
logging can break them.
TODO(dstanek): remove the need for this in a future refactoring
diff --git a/keystone-moon/keystone/tests/unit/common/test_sql_core.py b/keystone-moon/keystone/tests/unit/common/test_sql_core.py
index b110ed08..7d20eb03 100644
--- a/keystone-moon/keystone/tests/unit/common/test_sql_core.py
+++ b/keystone-moon/keystone/tests/unit/common/test_sql_core.py
@@ -32,14 +32,14 @@ class TestModelDictMixin(unit.BaseTestCase):
def test_creating_a_model_instance_from_a_dict(self):
d = {'id': utils.new_uuid(), 'text': utils.new_uuid()}
m = TestModel.from_dict(d)
- self.assertEqual(m.id, d['id'])
- self.assertEqual(m.text, d['text'])
+ self.assertEqual(d['id'], m.id)
+ self.assertEqual(d['text'], m.text)
def test_creating_a_dict_from_a_model_instance(self):
m = TestModel(id=utils.new_uuid(), text=utils.new_uuid())
d = m.to_dict()
- self.assertEqual(m.id, d['id'])
- self.assertEqual(m.text, d['text'])
+ self.assertEqual(d['id'], m.id)
+ self.assertEqual(d['text'], m.text)
def test_creating_a_model_instance_from_an_invalid_dict(self):
d = {'id': utils.new_uuid(), 'text': utils.new_uuid(), 'extra': None}
@@ -49,4 +49,4 @@ class TestModelDictMixin(unit.BaseTestCase):
expected = {'id': utils.new_uuid(), 'text': utils.new_uuid()}
m = TestModel(id=expected['id'], text=expected['text'])
m.extra = 'this should not be in the dictionary'
- self.assertEqual(m.to_dict(), expected)
+ self.assertEqual(expected, m.to_dict())
diff --git a/keystone-moon/keystone/tests/unit/common/test_utils.py b/keystone-moon/keystone/tests/unit/common/test_utils.py
index d52eb729..3641aacd 100644
--- a/keystone-moon/keystone/tests/unit/common/test_utils.py
+++ b/keystone-moon/keystone/tests/unit/common/test_utils.py
@@ -1,3 +1,4 @@
+# encoding: utf-8
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@@ -16,12 +17,13 @@ import uuid
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslo_serialization import jsonutils
+import six
from keystone.common import utils as common_utils
from keystone import exception
-from keystone import service
from keystone.tests import unit
from keystone.tests.unit import utils
+from keystone.version import service
CONF = cfg.CONF
@@ -36,6 +38,38 @@ class UtilsTestCase(unit.BaseTestCase):
super(UtilsTestCase, self).setUp()
self.config_fixture = self.useFixture(config_fixture.Config(CONF))
+ def test_resource_uuid(self):
+ uuid_str = '536e28c2017e405e89b25a1ed777b952'
+ self.assertEqual(uuid_str, common_utils.resource_uuid(uuid_str))
+
+ # Exact 64 length string.
+ uuid_str = ('536e28c2017e405e89b25a1ed777b952'
+ 'f13de678ac714bb1b7d1e9a007c10db5')
+ resource_id_namespace = common_utils.RESOURCE_ID_NAMESPACE
+ transformed_id = uuid.uuid5(resource_id_namespace, uuid_str).hex
+ self.assertEqual(transformed_id, common_utils.resource_uuid(uuid_str))
+
+ # Non-ASCII character test.
+ non_ascii_ = 'ß' * 32
+ transformed_id = uuid.uuid5(resource_id_namespace, non_ascii_).hex
+ self.assertEqual(transformed_id,
+ common_utils.resource_uuid(non_ascii_))
+
+    # This input is invalid because its length is more than 64.
+ invalid_input = 'x' * 65
+ self.assertRaises(ValueError, common_utils.resource_uuid,
+ invalid_input)
+
+ # 64 length unicode string, to mimic what is returned from mapping_id
+ # backend.
+ uuid_str = six.text_type('536e28c2017e405e89b25a1ed777b952'
+ 'f13de678ac714bb1b7d1e9a007c10db5')
+ resource_id_namespace = common_utils.RESOURCE_ID_NAMESPACE
+ if six.PY2:
+ uuid_str = uuid_str.encode('utf-8')
+ transformed_id = uuid.uuid5(resource_id_namespace, uuid_str).hex
+ self.assertEqual(transformed_id, common_utils.resource_uuid(uuid_str))
+
def test_hash(self):
password = 'right'
wrong = 'wrongwrong' # Two wrongs don't make a right
@@ -153,6 +187,18 @@ class UtilsTestCase(unit.BaseTestCase):
expected_json = '{"field":"value"}'
self.assertEqual(expected_json, json)
+ def test_url_safe_check(self):
+ base_str = 'i am safe'
+ self.assertFalse(common_utils.is_not_url_safe(base_str))
+ for i in common_utils.URL_RESERVED_CHARS:
+ self.assertTrue(common_utils.is_not_url_safe(base_str + i))
+
+ def test_url_safe_with_unicode_check(self):
+ base_str = u'i am \xe7afe'
+ self.assertFalse(common_utils.is_not_url_safe(base_str))
+ for i in common_utils.URL_RESERVED_CHARS:
+ self.assertTrue(common_utils.is_not_url_safe(base_str + i))
+
class ServiceHelperTests(unit.BaseTestCase):
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
index 2097b68b..96a0ffa9 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf
@@ -1,5 +1,5 @@
[database]
-#For a specific location file based sqlite use:
+#For a specific location file based SQLite use:
#connection = sqlite:////tmp/keystone.db
#To Test MySQL:
#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf
index 59cb8577..bb9ee08f 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf
@@ -4,11 +4,7 @@ user = cn=Manager,dc=openstack,dc=org
password = test
suffix = dc=openstack,dc=org
group_tree_dn = ou=UserGroups,dc=openstack,dc=org
-role_tree_dn = ou=Roles,dc=openstack,dc=org
-project_tree_dn = ou=Projects,dc=openstack,dc=org
user_tree_dn = ou=Users,dc=openstack,dc=org
-project_enabled_emulation = True
user_enabled_emulation = True
user_mail_attribute = mail
use_dumb_member = True
-
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
index 142ca203..2495f036 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf
@@ -1,4 +1,4 @@
-#Used for running the Migrate tests against a live Mysql Server
+#Used for running the Migrate tests against a live MySQL Server
#See _sql_livetest.py
[database]
connection = mysql+pymysql://keystone:keystone@localhost/keystone_test?charset=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf
index a85f5226..c36e05f9 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf
@@ -4,10 +4,7 @@ user = cn=Manager,dc=openstack,dc=org
password = test
suffix = dc=openstack,dc=org
group_tree_dn = ou=UserGroups,dc=openstack,dc=org
-role_tree_dn = ou=Roles,dc=openstack,dc=org
-project_tree_dn = ou=Projects,dc=openstack,dc=org
user_tree_dn = ou=Users,dc=openstack,dc=org
-project_enabled_emulation = True
user_enabled_emulation = True
user_mail_attribute = mail
use_dumb_member = True
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
index 063177bd..f2828e2e 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf
@@ -1,5 +1,5 @@
[database]
-#For a specific location file based sqlite use:
+#For a specific location file based SQLite use:
#connection = sqlite:////tmp/keystone.db
#To Test MySQL:
#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8
diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf
index d35b9139..b66044b7 100644
--- a/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf
@@ -4,10 +4,7 @@ user = dc=Manager,dc=openstack,dc=org
password = test
suffix = dc=openstack,dc=org
group_tree_dn = ou=UserGroups,dc=openstack,dc=org
-role_tree_dn = ou=Roles,dc=openstack,dc=org
-project_tree_dn = ou=Projects,dc=openstack,dc=org
user_tree_dn = ou=Users,dc=openstack,dc=org
-project_enabled_emulation = True
user_enabled_emulation = True
user_mail_attribute = mail
use_dumb_member = True
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
index 2dd86c25..64d01d48 100644
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf
@@ -11,4 +11,4 @@ password = password
suffix = cn=example,cn=com
[identity]
-driver = ldap \ No newline at end of file
+driver = ldap
diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
index ba22cdf9..af540537 100644
--- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
+++ b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf
@@ -8,4 +8,5 @@ password = password
suffix = cn=example,cn=com
[identity]
-driver = ldap \ No newline at end of file
+driver = ldap
+list_limit = 101
diff --git a/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py b/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py
index 5804f1c0..52a6095b 100644
--- a/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py
+++ b/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py
@@ -12,13 +12,20 @@
import uuid
+from oslo_config import cfg
+from oslo_config import fixture as config_fixture
+from oslo_serialization import jsonutils
+
from keystone.auth.plugins import mapped
-from keystone.contrib.federation import utils as mapping_utils
from keystone import exception
+from keystone.federation import utils as mapping_utils
from keystone.tests import unit
from keystone.tests.unit import mapping_fixtures
+FAKE_MAPPING_ID = uuid.uuid4().hex
+
+
class MappingRuleEngineTests(unit.BaseTestCase):
"""A class for testing the mapping rule engine."""
@@ -50,10 +57,9 @@ class MappingRuleEngineTests(unit.BaseTestCase):
a direct mapping for the users name.
"""
-
mapping = mapping_fixtures.MAPPING_LARGE
assertion = mapping_fixtures.ADMIN_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
values = rp.process(assertion)
fn = assertion.get('FirstName')
@@ -71,18 +77,15 @@ class MappingRuleEngineTests(unit.BaseTestCase):
This will not match since the email in the assertion will fail
the regex test. It is set to match any @example.com address.
But the incoming value is set to eviltester@example.org.
- RuleProcessor should return list of empty group_ids.
+ RuleProcessor should raise ValidationError.
"""
-
mapping = mapping_fixtures.MAPPING_LARGE
assertion = mapping_fixtures.BAD_TESTER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- mapped_properties = rp.process(assertion)
-
- self.assertValidMappedUserObject(mapped_properties)
- self.assertIsNone(mapped_properties['user'].get('name'))
- self.assertListEqual(list(), mapped_properties['group_ids'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
+ self.assertRaises(exception.ValidationError,
+ rp.process,
+ assertion)
def test_rule_engine_regex_many_groups(self):
"""Should return group CONTRACTOR_GROUP_ID.
@@ -93,10 +96,9 @@ class MappingRuleEngineTests(unit.BaseTestCase):
a match.
"""
-
mapping = mapping_fixtures.MAPPING_TESTER_REGEX
assertion = mapping_fixtures.TESTER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
@@ -116,10 +118,9 @@ class MappingRuleEngineTests(unit.BaseTestCase):
mapping.
"""
-
mapping = mapping_fixtures.MAPPING_SMALL
assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
@@ -138,10 +139,9 @@ class MappingRuleEngineTests(unit.BaseTestCase):
has `not_any_of`, and direct mapping to a username, no group.
"""
-
mapping = mapping_fixtures.MAPPING_LARGE
assertion = mapping_fixtures.CUSTOMER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
@@ -160,10 +160,9 @@ class MappingRuleEngineTests(unit.BaseTestCase):
rules must be matched, including a `not_any_of`.
"""
-
mapping = mapping_fixtures.MAPPING_SMALL
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
@@ -183,10 +182,9 @@ class MappingRuleEngineTests(unit.BaseTestCase):
regex set to True.
"""
-
mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
assertion = mapping_fixtures.DEVELOPER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
values = rp.process(assertion)
self.assertValidMappedUserObject(values)
@@ -203,18 +201,15 @@ class MappingRuleEngineTests(unit.BaseTestCase):
The email in the assertion will fail the regex test.
It is set to reject any @example.org address, but the
incoming value is set to evildeveloper@example.org.
- RuleProcessor should return list of empty group_ids.
+        RuleProcessor should raise ValidationError.
"""
-
mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX
assertion = mapping_fixtures.BAD_DEVELOPER_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- mapped_properties = rp.process(assertion)
-
- self.assertValidMappedUserObject(mapped_properties)
- self.assertIsNone(mapped_properties['user'].get('name'))
- self.assertListEqual(list(), mapped_properties['group_ids'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
+ self.assertRaises(exception.ValidationError,
+ rp.process,
+ assertion)
def _rule_engine_regex_match_and_many_groups(self, assertion):
"""Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID.
@@ -223,9 +218,8 @@ class MappingRuleEngineTests(unit.BaseTestCase):
Expect DEVELOPER_GROUP_ID and TESTER_GROUP_ID in the results.
"""
-
mapping = mapping_fixtures.MAPPING_LARGE
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
values = rp.process(assertion)
user_name = assertion.get('UserName')
@@ -265,16 +259,29 @@ class MappingRuleEngineTests(unit.BaseTestCase):
Expect RuleProcessor to discard non string object, which
is required for a correct rule match. RuleProcessor will result with
- empty list of groups.
+ ValidationError.
"""
mapping = mapping_fixtures.MAPPING_SMALL
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = mapping_fixtures.CONTRACTOR_MALFORMED_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertValidMappedUserObject(mapped_properties)
- self.assertIsNone(mapped_properties['user'].get('name'))
- self.assertListEqual(list(), mapped_properties['group_ids'])
+ self.assertRaises(exception.ValidationError,
+ rp.process,
+ assertion)
+
+ def test_using_remote_direct_mapping_that_doesnt_exist_fails(self):
+ """Test for the correct error when referring to a bad remote match.
+
+ The remote match must exist in a rule when a local section refers to
+        a remote match using the format (e.g. {0} in a local section).
+ """
+ mapping = mapping_fixtures.MAPPING_DIRECT_MAPPING_THROUGH_KEYWORD
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
+ assertion = mapping_fixtures.CUSTOMER_ASSERTION
+
+ self.assertRaises(exception.DirectMappingError,
+ rp.process,
+ assertion)
def test_rule_engine_returns_group_names(self):
"""Check whether RuleProcessor returns group names with their domains.
@@ -285,7 +292,7 @@ class MappingRuleEngineTests(unit.BaseTestCase):
"""
mapping = mapping_fixtures.MAPPING_GROUP_NAMES
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
@@ -317,10 +324,9 @@ class MappingRuleEngineTests(unit.BaseTestCase):
correctly filters out Manager and only allows Developer and Contractor.
"""
-
mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
@@ -354,10 +360,9 @@ class MappingRuleEngineTests(unit.BaseTestCase):
correctly filters out Manager and Developer and only allows Contractor.
"""
-
mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
@@ -383,10 +388,9 @@ class MappingRuleEngineTests(unit.BaseTestCase):
entry in the remote rules.
"""
-
mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MULTIPLES
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
@@ -412,7 +416,7 @@ class MappingRuleEngineTests(unit.BaseTestCase):
"""
mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
self.assertRaises(exception.ValidationError, rp.process, assertion)
def test_rule_engine_blacklist_direct_group_mapping_missing_domain(self):
@@ -423,7 +427,7 @@ class MappingRuleEngineTests(unit.BaseTestCase):
"""
mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN
assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
self.assertRaises(exception.ValidationError, rp.process, assertion)
def test_rule_engine_no_groups_allowed(self):
@@ -436,7 +440,7 @@ class MappingRuleEngineTests(unit.BaseTestCase):
"""
mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertListEqual(mapped_properties['group_names'], [])
@@ -444,41 +448,19 @@ class MappingRuleEngineTests(unit.BaseTestCase):
self.assertEqual('tbo', mapped_properties['user']['name'])
def test_mapping_federated_domain_specified(self):
- """Test mapping engine when domain 'ephemeral' is explicitely set.
+ """Test mapping engine when domain 'ephemeral' is explicitly set.
For that, we use mapping rule MAPPING_EPHEMERAL_USER and assertion
EMPLOYEE_ASSERTION
"""
mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
- def test_create_user_object_with_bad_mapping(self):
- """Test if user object is created even with bad mapping.
-
- User objects will be created by mapping engine always as long as there
- is corresponding local rule. This test shows, that even with assertion
- where no group names nor ids are matched, but there is 'blind' rule for
- mapping user, such object will be created.
-
- In this test MAPPING_EHPEMERAL_USER expects UserName set to jsmith
- whereas value from assertion is 'tbo'.
-
- """
- mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER
- rp = mapping_utils.RuleProcessor(mapping['rules'])
- assertion = mapping_fixtures.CONTRACTOR_ASSERTION
- mapped_properties = rp.process(assertion)
- self.assertIsNotNone(mapped_properties)
- self.assertValidMappedUserObject(mapped_properties)
-
- self.assertNotIn('id', mapped_properties['user'])
- self.assertNotIn('name', mapped_properties['user'])
-
def test_set_ephemeral_domain_to_ephemeral_users(self):
"""Test auto assigning service domain to ephemeral users.
@@ -488,7 +470,7 @@ class MappingRuleEngineTests(unit.BaseTestCase):
"""
mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = mapping_fixtures.CONTRACTOR_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
@@ -497,7 +479,7 @@ class MappingRuleEngineTests(unit.BaseTestCase):
def test_local_user_local_domain(self):
"""Test that local users can have non-service domains assigned."""
mapping = mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = mapping_fixtures.CONTRACTOR_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
@@ -514,19 +496,21 @@ class MappingRuleEngineTests(unit.BaseTestCase):
- Check if the user has proper domain ('federated') set
- Check if the user has property type set ('ephemeral')
- Check if user's name is properly mapped from the assertion
- - Check if user's id is properly set and equal to name, as it was not
- explicitely specified in the mapping.
+ - Check if unique_id is properly set and equal to display_name,
+ as it was not explicitly specified in the mapping.
"""
mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = mapping_fixtures.CONTRACTOR_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
- mapped.setup_username({}, mapped_properties)
- self.assertEqual('jsmith', mapped_properties['user']['id'])
self.assertEqual('jsmith', mapped_properties['user']['name'])
+ unique_id, display_name = mapped.get_user_unique_id_and_display_name(
+ {}, mapped_properties)
+ self.assertEqual('jsmith', unique_id)
+ self.assertEqual('jsmith', display_name)
def test_user_identifications_name_and_federated_domain(self):
"""Test varius mapping options and how users are identified.
@@ -537,20 +521,19 @@ class MappingRuleEngineTests(unit.BaseTestCase):
- Check if the user has proper domain ('federated') set
- Check if the user has propert type set ('ephemeral')
- Check if user's name is properly mapped from the assertion
- - Check if user's id is properly set and equal to name, as it was not
- explicitely specified in the mapping.
+ - Check if the unique_id and display_name are properly set
"""
mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = mapping_fixtures.EMPLOYEE_ASSERTION
mapped_properties = rp.process(assertion)
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
- mapped.setup_username({}, mapped_properties)
- self.assertEqual('tbo', mapped_properties['user']['name'])
- self.assertEqual('abc123%40example.com',
- mapped_properties['user']['id'])
+ unique_id, display_name = mapped.get_user_unique_id_and_display_name(
+ {}, mapped_properties)
+ self.assertEqual('tbo', display_name)
+ self.assertEqual('abc123%40example.com', unique_id)
def test_user_identification_id(self):
"""Test varius mapping options and how users are identified.
@@ -560,21 +543,21 @@ class MappingRuleEngineTests(unit.BaseTestCase):
Test plan:
- Check if the user has proper domain ('federated') set
- Check if the user has propert type set ('ephemeral')
- - Check if user's id is properly mapped from the assertion
- - Check if user's name is properly set and equal to id, as it was not
- explicitely specified in the mapping.
+ - Check if user's display_name is properly set and equal to unique_id,
+ as it was not explicitly specified in the mapping.
"""
mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = mapping_fixtures.ADMIN_ASSERTION
mapped_properties = rp.process(assertion)
context = {'environment': {}}
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
- mapped.setup_username(context, mapped_properties)
- self.assertEqual('bob', mapped_properties['user']['name'])
- self.assertEqual('bob', mapped_properties['user']['id'])
+ unique_id, display_name = mapped.get_user_unique_id_and_display_name(
+ context, mapped_properties)
+ self.assertEqual('bob', unique_id)
+ self.assertEqual('bob', display_name)
def test_user_identification_id_and_name(self):
"""Test varius mapping options and how users are identified.
@@ -584,8 +567,8 @@ class MappingRuleEngineTests(unit.BaseTestCase):
Test plan:
- Check if the user has proper domain ('federated') set
- Check if the user has proper type set ('ephemeral')
- - Check if user's name is properly mapped from the assertion
- - Check if user's id is properly set and and equal to value hardcoded
+ - Check if display_name is properly set from the assertion
+      - Check if unique_id is properly set and equal to value hardcoded
in the mapping
This test does two iterations with different assertions used as input
@@ -601,19 +584,21 @@ class MappingRuleEngineTests(unit.BaseTestCase):
(mapping_fixtures.EMPLOYEE_ASSERTION, 'tbo')]
for assertion, exp_user_name in testcases:
mapping = mapping_fixtures.MAPPING_USER_IDS
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
mapped_properties = rp.process(assertion)
context = {'environment': {}}
self.assertIsNotNone(mapped_properties)
self.assertValidMappedUserObject(mapped_properties)
- mapped.setup_username(context, mapped_properties)
- self.assertEqual(exp_user_name, mapped_properties['user']['name'])
- self.assertEqual('abc123%40example.com',
- mapped_properties['user']['id'])
+ unique_id, display_name = (
+ mapped.get_user_unique_id_and_display_name(context,
+ mapped_properties)
+ )
+ self.assertEqual(exp_user_name, display_name)
+ self.assertEqual('abc123%40example.com', unique_id)
def test_whitelist_pass_through(self):
mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_PASS_THROUGH
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = mapping_fixtures.DEVELOPER_ASSERTION
mapped_properties = rp.process(assertion)
self.assertValidMappedUserObject(mapped_properties)
@@ -622,13 +607,119 @@ class MappingRuleEngineTests(unit.BaseTestCase):
self.assertEqual('Developer',
mapped_properties['group_names'][0]['name'])
+ def test_mapping_with_incorrect_local_keys(self):
+ mapping = mapping_fixtures.MAPPING_BAD_LOCAL_SETUP
+ self.assertRaises(exception.ValidationError,
+ mapping_utils.validate_mapping_structure,
+ mapping)
+
+ def test_mapping_with_group_name_and_domain(self):
+ mapping = mapping_fixtures.MAPPING_GROUP_NAMES
+ mapping_utils.validate_mapping_structure(mapping)
+
def test_type_not_in_assertion(self):
"""Test that if the remote "type" is not in the assertion it fails."""
mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_PASS_THROUGH
- rp = mapping_utils.RuleProcessor(mapping['rules'])
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
assertion = {uuid.uuid4().hex: uuid.uuid4().hex}
+ self.assertRaises(exception.ValidationError,
+ rp.process,
+ assertion)
+
+ def test_rule_engine_group_ids_mapping_whitelist(self):
+ """Test mapping engine when group_ids is explicitly set
+
+ Also test whitelists on group ids
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUPS_IDS_WHITELIST
+ assertion = mapping_fixtures.GROUP_IDS_ASSERTION
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
mapped_properties = rp.process(assertion)
- self.assertValidMappedUserObject(mapped_properties)
+ self.assertIsNotNone(mapped_properties)
+ self.assertEqual('opilotte', mapped_properties['user']['name'])
+ self.assertListEqual([], mapped_properties['group_names'])
+ self.assertItemsEqual(['abc123', 'ghi789', 'klm012'],
+ mapped_properties['group_ids'])
- self.assertNotIn('id', mapped_properties['user'])
- self.assertNotIn('name', mapped_properties['user'])
+ def test_rule_engine_group_ids_mapping_blacklist(self):
+ """Test mapping engine when group_ids is explicitly set.
+
+ Also test blacklists on group ids
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUPS_IDS_BLACKLIST
+ assertion = mapping_fixtures.GROUP_IDS_ASSERTION
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertEqual('opilotte', mapped_properties['user']['name'])
+ self.assertListEqual([], mapped_properties['group_names'])
+ self.assertItemsEqual(['abc123', 'ghi789', 'klm012'],
+ mapped_properties['group_ids'])
+
+ def test_rule_engine_group_ids_mapping_only_one_group(self):
+ """Test mapping engine when group_ids is explicitly set.
+
+ If the group ids list has only one group,
+ test if the transformation is done correctly
+
+ """
+ mapping = mapping_fixtures.MAPPING_GROUPS_IDS_WHITELIST
+ assertion = mapping_fixtures.GROUP_IDS_ASSERTION_ONLY_ONE_GROUP
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
+ mapped_properties = rp.process(assertion)
+ self.assertIsNotNone(mapped_properties)
+ self.assertEqual('opilotte', mapped_properties['user']['name'])
+ self.assertListEqual([], mapped_properties['group_names'])
+ self.assertItemsEqual(['210mlk', '321cba'],
+ mapped_properties['group_ids'])
+
+
+class TestUnicodeAssertionData(unit.BaseTestCase):
+ """Ensure that unicode data in the assertion headers works.
+
+ Bug #1525250 reported that something was not getting correctly encoded
+ and/or decoded when assertion data contained non-ASCII characters.
+
+ This test class mimics what happens in a real HTTP request.
+ """
+
+ def setUp(self):
+ super(TestUnicodeAssertionData, self).setUp()
+ self.config_fixture = self.useFixture(config_fixture.Config(cfg.CONF))
+ self.config_fixture.config(group='federation',
+ assertion_prefix='PFX')
+
+ def _pull_mapping_rules_from_the_database(self):
+        # NOTE(dstanek): In a live system, the rules are dumped into JSON bytes
+        # before being stored in the database. Upon retrieval the bytes are
+        # loaded and the resulting dictionary is full of unicode text strings.
+        # Most of the tests in this file incorrectly assume the mapping fixture
+        # dictionary is the same as what it would look like coming out of the
+        # database. The strings, when coming out of the database, are all text.
+ return jsonutils.loads(jsonutils.dumps(
+ mapping_fixtures.MAPPING_UNICODE))
+
+ def _pull_assertion_from_the_request_headers(self):
+ # NOTE(dstanek): In a live system the bytes for the assertion are
+ # pulled from the HTTP headers. These bytes may be decodable as
+ # ISO-8859-1 according to Section 3.2.4 of RFC 7230. Let's assume
+ # that our web server plugins are correctly encoding the data.
+ context = dict(environment=mapping_fixtures.UNICODE_NAME_ASSERTION)
+ data = mapping_utils.get_assertion_params_from_env(context)
+ # NOTE(dstanek): keystone.auth.plugins.mapped
+ return dict(data)
+
+ def test_unicode(self):
+ mapping = self._pull_mapping_rules_from_the_database()
+ assertion = self._pull_assertion_from_the_request_headers()
+
+ rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules'])
+ values = rp.process(assertion)
+
+ fn = assertion.get('PFX_FirstName')
+ ln = assertion.get('PFX_LastName')
+ full_name = '%s %s' % (fn, ln)
+ user_name = values.get('user', {}).get('name')
+ self.assertEqual(full_name, user_name)
diff --git a/keystone-moon/keystone/tests/unit/core.py b/keystone-moon/keystone/tests/unit/core.py
index eb8b9f65..1054e131 100644
--- a/keystone-moon/keystone/tests/unit/core.py
+++ b/keystone-moon/keystone/tests/unit/core.py
@@ -14,8 +14,11 @@
from __future__ import absolute_import
import atexit
+import base64
import datetime
import functools
+import hashlib
+import json
import logging
import os
import re
@@ -28,14 +31,16 @@ import warnings
import fixtures
from oslo_config import cfg
from oslo_config import fixture as config_fixture
+from oslo_context import context as oslo_context
+from oslo_context import fixture as oslo_ctx_fixture
from oslo_log import fixture as log_fixture
from oslo_log import log
from oslo_utils import timeutils
-import oslotest.base as oslotest
from oslotest import mockpatch
from paste.deploy import loadwsgi
import six
from sqlalchemy import exc
+import testtools
from testtools import testcase
# NOTE(ayoung)
@@ -45,24 +50,20 @@ from keystone.common import environment # noqa
environment.use_eventlet()
from keystone import auth
-from keystone.common import config as common_cfg
+from keystone.common import config
from keystone.common import dependency
-from keystone.common import kvs
from keystone.common.kvs import core as kvs_core
from keystone.common import sql
-from keystone import config
-from keystone import controllers
from keystone import exception
from keystone import notifications
-from keystone.policy.backends import rules
from keystone.server import common
-from keystone import service
from keystone.tests.unit import ksfixtures
+from keystone.version import controllers
+from keystone.version import service
config.configure()
-LOG = log.getLogger(__name__)
PID = six.text_type(os.getpid())
TESTSDIR = os.path.dirname(os.path.abspath(__file__))
TESTCONF = os.path.join(TESTSDIR, 'config_files')
@@ -82,7 +83,6 @@ TMPDIR = _calc_tmpdir()
CONF = cfg.CONF
log.register_options(CONF)
-rules.init()
IN_MEM_DB_CONN_STRING = 'sqlite://'
@@ -208,6 +208,22 @@ def skip_if_cache_disabled(*sections):
return wrapper
+def skip_if_cache_is_enabled(*sections):
+ def wrapper(f):
+ @functools.wraps(f)
+ def inner(*args, **kwargs):
+ if CONF.cache.enabled:
+ for s in sections:
+ conf_sec = getattr(CONF, s, None)
+ if conf_sec is not None:
+ if getattr(conf_sec, 'caching', True):
+ raise testcase.TestSkipped('%s caching enabled.' %
+ s)
+ return f(*args, **kwargs)
+ return inner
+ return wrapper
+
+
def skip_if_no_multiple_domains_support(f):
"""Decorator to skip tests for identity drivers limited to one domain."""
@functools.wraps(f)
@@ -223,113 +239,230 @@ class UnexpectedExit(Exception):
pass
-def new_ref():
- """Populates a ref with attributes common to some API entities."""
- return {
+def new_region_ref(parent_region_id=None, **kwargs):
+ ref = {
'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
- 'enabled': True}
-
+ 'parent_region_id': parent_region_id}
-def new_region_ref():
- ref = new_ref()
- # Region doesn't have name or enabled.
- del ref['name']
- del ref['enabled']
- ref['parent_region_id'] = None
+ ref.update(kwargs)
return ref
-def new_service_ref():
- ref = new_ref()
- ref['type'] = uuid.uuid4().hex
+def new_service_ref(**kwargs):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True,
+ 'type': uuid.uuid4().hex,
+ }
+ ref.update(kwargs)
return ref
-def new_endpoint_ref(service_id, interface='public', default_region_id=None,
- **kwargs):
- ref = new_ref()
- del ref['enabled'] # enabled is optional
- ref['interface'] = interface
- ref['service_id'] = service_id
- ref['url'] = 'https://' + uuid.uuid4().hex + '.com'
- ref['region_id'] = default_region_id
+NEEDS_REGION_ID = object()
+
+
+def new_endpoint_ref(service_id, interface='public',
+ region_id=NEEDS_REGION_ID, **kwargs):
+
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'interface': interface,
+ 'service_id': service_id,
+ 'url': 'https://' + uuid.uuid4().hex + '.com',
+ }
+
+ if region_id is NEEDS_REGION_ID:
+ ref['region_id'] = uuid.uuid4().hex
+ elif region_id is None and kwargs.get('region') is not None:
+ # pre-3.2 form endpoints are not supported by this function
+ raise NotImplementedError("use new_endpoint_ref_with_region")
+ else:
+ ref['region_id'] = region_id
ref.update(kwargs)
return ref
-def new_domain_ref():
- ref = new_ref()
+def new_endpoint_ref_with_region(service_id, region, interface='public',
+ **kwargs):
+ """Define an endpoint_ref having a pre-3.2 form.
+
+ Contains the deprecated 'region' instead of 'region_id'.
+ """
+ ref = new_endpoint_ref(service_id, interface, region=region,
+ region_id='invalid', **kwargs)
+ del ref['region_id']
return ref
-def new_project_ref(domain_id=None, parent_id=None, is_domain=False):
- ref = new_ref()
- ref['domain_id'] = domain_id
- ref['parent_id'] = parent_id
- ref['is_domain'] = is_domain
+def new_domain_ref(**kwargs):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True
+ }
+ ref.update(kwargs)
+ return ref
+
+
+def new_project_ref(domain_id=None, is_domain=False, **kwargs):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True,
+ 'domain_id': domain_id,
+ 'is_domain': is_domain,
+ }
+ # NOTE(henry-nash): We don't include parent_id in the initial list above
+ # since specifying it is optional depending on where the project sits in
+ # the hierarchy (and a parent_id of None has meaning - i.e. it's a top
+ # level project).
+ ref.update(kwargs)
return ref
-def new_user_ref(domain_id, project_id=None):
- ref = new_ref()
- ref['domain_id'] = domain_id
- ref['email'] = uuid.uuid4().hex
- ref['password'] = uuid.uuid4().hex
+def new_user_ref(domain_id, project_id=None, **kwargs):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True,
+ 'domain_id': domain_id,
+ 'email': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex,
+ }
if project_id:
ref['default_project_id'] = project_id
+ ref.update(kwargs)
return ref
-def new_group_ref(domain_id):
- ref = new_ref()
- ref['domain_id'] = domain_id
+def new_federated_user_ref(idp_id=None, protocol_id=None, **kwargs):
+ ref = {
+ 'idp_id': idp_id or 'ORG_IDP',
+ 'protocol_id': protocol_id or 'saml2',
+ 'unique_id': uuid.uuid4().hex,
+ 'display_name': uuid.uuid4().hex,
+ }
+ ref.update(kwargs)
return ref
-def new_credential_ref(user_id, project_id=None, cred_type=None):
- ref = dict()
- ref['id'] = uuid.uuid4().hex
- ref['user_id'] = user_id
- if cred_type == 'ec2':
- ref['type'] = 'ec2'
- ref['blob'] = uuid.uuid4().hex
- else:
- ref['type'] = 'cert'
- ref['blob'] = uuid.uuid4().hex
+def new_group_ref(domain_id, **kwargs):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'domain_id': domain_id
+ }
+ ref.update(kwargs)
+ return ref
+
+
+def new_credential_ref(user_id, project_id=None, type='cert', **kwargs):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'user_id': user_id,
+ 'type': type,
+ }
+
if project_id:
ref['project_id'] = project_id
+ if 'blob' not in kwargs:
+ ref['blob'] = uuid.uuid4().hex
+
+ ref.update(kwargs)
return ref
-def new_role_ref():
- ref = new_ref()
- # Roles don't have a description or the enabled flag
- del ref['description']
- del ref['enabled']
+def new_cert_credential(user_id, project_id=None, blob=None, **kwargs):
+ if blob is None:
+ blob = {'access': uuid.uuid4().hex, 'secret': uuid.uuid4().hex}
+
+ credential = new_credential_ref(user_id=user_id,
+ project_id=project_id,
+ blob=json.dumps(blob),
+ type='cert',
+ **kwargs)
+ return blob, credential
+
+
+def new_ec2_credential(user_id, project_id=None, blob=None, **kwargs):
+ if blob is None:
+ blob = {
+ 'access': uuid.uuid4().hex,
+ 'secret': uuid.uuid4().hex,
+ 'trust_id': None
+ }
+
+ if 'id' not in kwargs:
+ access = blob['access'].encode('utf-8')
+ kwargs['id'] = hashlib.sha256(access).hexdigest()
+
+ credential = new_credential_ref(user_id=user_id,
+ project_id=project_id,
+ blob=json.dumps(blob),
+ type='ec2',
+ **kwargs)
+ return blob, credential
+
+
+def new_totp_credential(user_id, project_id=None, blob=None):
+ if not blob:
+ blob = base64.b32encode(uuid.uuid4().hex).rstrip('=')
+ credential = new_credential_ref(user_id=user_id,
+ project_id=project_id,
+ blob=blob,
+ type='totp')
+ return credential
+
+
+def new_role_ref(**kwargs):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'domain_id': None
+ }
+ ref.update(kwargs)
return ref
-def new_policy_ref():
- ref = new_ref()
- ref['blob'] = uuid.uuid4().hex
- ref['type'] = uuid.uuid4().hex
+def new_policy_ref(**kwargs):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True,
+ # Store serialized JSON data as the blob to mimic real world usage.
+ 'blob': json.dumps({'data': uuid.uuid4().hex, }),
+ 'type': uuid.uuid4().hex,
+ }
+
+ ref.update(kwargs)
return ref
def new_trust_ref(trustor_user_id, trustee_user_id, project_id=None,
impersonation=None, expires=None, role_ids=None,
role_names=None, remaining_uses=None,
- allow_redelegation=False):
- ref = dict()
- ref['id'] = uuid.uuid4().hex
- ref['trustor_user_id'] = trustor_user_id
- ref['trustee_user_id'] = trustee_user_id
- ref['impersonation'] = impersonation or False
- ref['project_id'] = project_id
- ref['remaining_uses'] = remaining_uses
- ref['allow_redelegation'] = allow_redelegation
+ allow_redelegation=False, redelegation_count=None, **kwargs):
+ ref = {
+ 'id': uuid.uuid4().hex,
+ 'trustor_user_id': trustor_user_id,
+ 'trustee_user_id': trustee_user_id,
+ 'impersonation': impersonation or False,
+ 'project_id': project_id,
+ 'remaining_uses': remaining_uses,
+ 'allow_redelegation': allow_redelegation,
+ }
+
+ if isinstance(redelegation_count, int):
+ ref.update(redelegation_count=redelegation_count)
if isinstance(expires, six.string_types):
ref['expires_at'] = expires
@@ -351,10 +484,25 @@ def new_trust_ref(trustor_user_id, trustee_user_id, project_id=None,
for role_name in role_names:
ref['roles'].append({'name': role_name})
+ ref.update(kwargs)
return ref
-class BaseTestCase(oslotest.BaseTestCase):
+def create_user(api, domain_id, **kwargs):
+ """Create a user via the API. Keep the created password.
+
+ The password is saved and restored when api.create_user() is called.
+ Only use this routine if there is a requirement for the user object to
+ have a valid password after api.create_user() is called.
+ """
+ user = new_user_ref(domain_id=domain_id, **kwargs)
+ password = user['password']
+ user = api.create_user(user)
+ user['password'] = password
+ return user
+
+
+class BaseTestCase(testtools.TestCase):
"""Light weight base test class.
This is a placeholder that will eventually go away once the
@@ -365,6 +513,10 @@ class BaseTestCase(oslotest.BaseTestCase):
def setUp(self):
super(BaseTestCase, self).setUp()
+
+ self.useFixture(fixtures.NestedTempfile())
+ self.useFixture(fixtures.TempHomeDir())
+
self.useFixture(mockpatch.PatchObject(sys, 'exit',
side_effect=UnexpectedExit))
self.useFixture(log_fixture.get_logging_handle_error_fixture())
@@ -373,6 +525,10 @@ class BaseTestCase(oslotest.BaseTestCase):
module='^keystone\\.')
warnings.simplefilter('error', exc.SAWarning)
self.addCleanup(warnings.resetwarnings)
+ # Ensure we have an empty threadlocal context at the start of each
+ # test.
+ self.assertIsNone(oslo_context.get_current())
+ self.useFixture(oslo_ctx_fixture.ClearRequestContext())
def cleanup_instance(self, *names):
"""Create a function suitable for use with self.addCleanup.
@@ -395,6 +551,9 @@ class TestCase(BaseTestCase):
def config_files(self):
return []
+ def _policy_fixture(self):
+ return ksfixtures.Policy(dirs.etc('policy.json'), self.config_fixture)
+
def config_overrides(self):
# NOTE(morganfainberg): enforce config_overrides can only ever be
# called a single time.
@@ -403,18 +562,19 @@ class TestCase(BaseTestCase):
signing_certfile = 'examples/pki/certs/signing_cert.pem'
signing_keyfile = 'examples/pki/private/signing_key.pem'
- self.config_fixture.config(group='oslo_policy',
- policy_file=dirs.etc('policy.json'))
+
+ self.useFixture(self._policy_fixture())
+
self.config_fixture.config(
# TODO(morganfainberg): Make Cache Testing a separate test case
# in tempest, and move it out of the base unit tests.
group='cache',
backend='dogpile.cache.memory',
enabled=True,
- proxies=['keystone.tests.unit.test_cache.CacheIsolatingProxy'])
+ proxies=['oslo_cache.testing.CacheIsolatingProxy'])
self.config_fixture.config(
group='catalog',
- driver='templated',
+ driver='sql',
template_file=dirs.tests('default_catalog.templates'))
self.config_fixture.config(
group='kvs',
@@ -422,7 +582,6 @@ class TestCase(BaseTestCase):
('keystone.tests.unit.test_kvs.'
'KVSBackendForcedKeyMangleFixture'),
'keystone.tests.unit.test_kvs.KVSBackendFixture'])
- self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='signing', certfile=signing_certfile,
keyfile=signing_keyfile,
@@ -444,17 +603,15 @@ class TestCase(BaseTestCase):
'routes.middleware=INFO',
'stevedore.extension=INFO',
'keystone.notifications=INFO',
- 'keystone.common._memcache_pool=INFO',
'keystone.common.ldap=INFO',
])
self.auth_plugin_config_override()
def auth_plugin_config_override(self, methods=None, **method_classes):
- if methods is not None:
- self.config_fixture.config(group='auth', methods=methods)
- common_cfg.setup_authentication()
- if method_classes:
- self.config_fixture.config(group='auth', **method_classes)
+ self.useFixture(
+ ksfixtures.ConfigAuthPlugins(self.config_fixture,
+ methods,
+ **method_classes))
def _assert_config_overrides_called(self):
assert self.__config_overrides_called is True
@@ -462,6 +619,7 @@ class TestCase(BaseTestCase):
def setUp(self):
super(TestCase, self).setUp()
self.__config_overrides_called = False
+ self.__load_backends_called = False
self.addCleanup(CONF.reset)
self.config_fixture = self.useFixture(config_fixture.Config(CONF))
self.addCleanup(delattr, self, 'config_fixture')
@@ -473,9 +631,10 @@ class TestCase(BaseTestCase):
def mocked_register_auth_plugin_opt(conf, opt):
self.config_fixture.register_opt(opt, group='auth')
self.useFixture(mockpatch.PatchObject(
- common_cfg, '_register_auth_plugin_opt',
+ config, '_register_auth_plugin_opt',
new=mocked_register_auth_plugin_opt))
+ self.sql_driver_version_overrides = {}
self.config_overrides()
# NOTE(morganfainberg): ensure config_overrides has been called.
self.addCleanup(self._assert_config_overrides_called)
@@ -498,8 +657,6 @@ class TestCase(BaseTestCase):
# tests aren't used.
self.addCleanup(dependency.reset)
- self.addCleanup(kvs.INMEMDB.clear)
-
# Ensure Notification subscriptions and resource types are empty
self.addCleanup(notifications.clear_subscribers)
self.addCleanup(notifications.reset_notifier)
@@ -515,7 +672,6 @@ class TestCase(BaseTestCase):
def load_backends(self):
"""Initializes each manager and assigns them to an attribute."""
-
# TODO(blk-u): Shouldn't need to clear the registry here, but some
# tests call load_backends multiple times. These should be fixed to
# only call load_backends once.
@@ -541,7 +697,7 @@ class TestCase(BaseTestCase):
This is useful to load managers initialized by extensions. No extra
backends are loaded by default.
- :return: dict of name -> manager
+ :returns: dict of name -> manager
"""
return {}
@@ -573,7 +729,8 @@ class TestCase(BaseTestCase):
fixtures_to_cleanup.append(attrname)
for tenant in fixtures.TENANTS:
- if hasattr(self, 'tenant_%s' % tenant['id']):
+ tenant_attr_name = 'tenant_%s' % tenant['name'].lower()
+ if hasattr(self, tenant_attr_name):
try:
# This will clear out any roles on the project as well
self.resource_api.delete_project(tenant['id'])
@@ -582,9 +739,8 @@ class TestCase(BaseTestCase):
rv = self.resource_api.create_project(
tenant['id'], tenant)
- attrname = 'tenant_%s' % tenant['id']
- setattr(self, attrname, rv)
- fixtures_to_cleanup.append(attrname)
+ setattr(self, tenant_attr_name, rv)
+ fixtures_to_cleanup.append(tenant_attr_name)
for role in fixtures.ROLES:
try:
@@ -625,6 +781,17 @@ class TestCase(BaseTestCase):
setattr(self, attrname, user_copy)
fixtures_to_cleanup.append(attrname)
+ for role_assignment in fixtures.ROLE_ASSIGNMENTS:
+ role_id = role_assignment['role_id']
+ user = role_assignment['user']
+ tenant_id = role_assignment['tenant_id']
+ user_id = getattr(self, 'user_%s' % user)['id']
+ try:
+ self.assignment_api.add_role_to_user_and_project(
+ user_id, tenant_id, role_id)
+ except exception.Conflict:
+ pass
+
self.addCleanup(self.cleanup_instance(*fixtures_to_cleanup))
def _paste_config(self, config):
@@ -648,6 +815,10 @@ class TestCase(BaseTestCase):
:param delta: Maximum allowable time delta, defined in seconds.
"""
+ if a == b:
+ # Short-circuit if the values are the same.
+ return
+
msg = '%s != %s within %s delta' % (a, b, delta)
self.assertTrue(abs(a - b).seconds <= delta, msg)
@@ -664,11 +835,11 @@ class TestCase(BaseTestCase):
if isinstance(expected_regexp, six.string_types):
expected_regexp = re.compile(expected_regexp)
- if isinstance(exc_value.args[0], unicode):
- if not expected_regexp.search(unicode(exc_value)):
+ if isinstance(exc_value.args[0], six.text_type):
+ if not expected_regexp.search(six.text_type(exc_value)):
raise self.failureException(
'"%s" does not match "%s"' %
- (expected_regexp.pattern, unicode(exc_value)))
+ (expected_regexp.pattern, six.text_type(exc_value)))
else:
if not expected_regexp.search(str(exc_value)):
raise self.failureException(
@@ -708,12 +879,29 @@ class TestCase(BaseTestCase):
class SQLDriverOverrides(object):
"""A mixin for consolidating sql-specific test overrides."""
+
def config_overrides(self):
super(SQLDriverOverrides, self).config_overrides()
# SQL specific driver overrides
self.config_fixture.config(group='catalog', driver='sql')
self.config_fixture.config(group='identity', driver='sql')
self.config_fixture.config(group='policy', driver='sql')
- self.config_fixture.config(group='revoke', driver='sql')
self.config_fixture.config(group='token', driver='sql')
self.config_fixture.config(group='trust', driver='sql')
+
+ def use_specific_sql_driver_version(self, driver_path,
+ versionless_backend, version_suffix):
+ """Add this versioned driver to the list that will be loaded.
+
+ :param driver_path: The path to the drivers, e.g. 'keystone.assignment'
+ :param versionless_backend: The name of the versionless drivers, e.g.
+ 'backends'
+        :param version_suffix: The suffix for the version, e.g. ``V8_``
+
+ This method assumes that versioned drivers are named:
+ <version_suffix><name of versionless driver>, e.g. 'V8_backends'.
+
+ """
+ self.sql_driver_version_overrides[driver_path] = {
+ 'versionless_backend': versionless_backend,
+ 'versioned_backend': version_suffix + versionless_backend}
diff --git a/keystone-moon/keystone/tests/unit/default_fixtures.py b/keystone-moon/keystone/tests/unit/default_fixtures.py
index 80b0665f..7f661986 100644
--- a/keystone-moon/keystone/tests/unit/default_fixtures.py
+++ b/keystone-moon/keystone/tests/unit/default_fixtures.py
@@ -14,53 +14,67 @@
# NOTE(dolph): please try to avoid additional fixtures if possible; test suite
# performance may be negatively affected.
+import uuid
+BAR_TENANT_ID = uuid.uuid4().hex
+BAZ_TENANT_ID = uuid.uuid4().hex
+MTU_TENANT_ID = uuid.uuid4().hex
+SERVICE_TENANT_ID = uuid.uuid4().hex
DEFAULT_DOMAIN_ID = 'default'
TENANTS = [
{
- 'id': 'bar',
+ 'id': BAR_TENANT_ID,
'name': 'BAR',
'domain_id': DEFAULT_DOMAIN_ID,
'description': 'description',
'enabled': True,
- 'parent_id': None,
+ 'parent_id': DEFAULT_DOMAIN_ID,
'is_domain': False,
}, {
- 'id': 'baz',
+ 'id': BAZ_TENANT_ID,
'name': 'BAZ',
'domain_id': DEFAULT_DOMAIN_ID,
'description': 'description',
'enabled': True,
- 'parent_id': None,
+ 'parent_id': DEFAULT_DOMAIN_ID,
'is_domain': False,
}, {
- 'id': 'mtu',
+ 'id': MTU_TENANT_ID,
'name': 'MTU',
'description': 'description',
'enabled': True,
'domain_id': DEFAULT_DOMAIN_ID,
- 'parent_id': None,
+ 'parent_id': DEFAULT_DOMAIN_ID,
'is_domain': False,
}, {
- 'id': 'service',
+ 'id': SERVICE_TENANT_ID,
'name': 'service',
'description': 'description',
'enabled': True,
'domain_id': DEFAULT_DOMAIN_ID,
- 'parent_id': None,
+ 'parent_id': DEFAULT_DOMAIN_ID,
'is_domain': False,
}
]
# NOTE(ja): a role of keystone_admin is done in setUp
USERS = [
+ # NOTE(morganfainberg): Admin user for replacing admin_token_auth
+ {
+ 'id': 'reqadmin',
+ 'name': 'REQ_ADMIN',
+ 'domain_id': DEFAULT_DOMAIN_ID,
+ 'password': 'password',
+ 'tenants': [],
+ 'enabled': True
+ },
{
'id': 'foo',
'name': 'FOO',
'domain_id': DEFAULT_DOMAIN_ID,
'password': 'foo2',
- 'tenants': ['bar'],
+ 'tenants': [BAR_TENANT_ID],
'enabled': True,
'email': 'foo@bar.com',
}, {
@@ -69,8 +83,8 @@ USERS = [
'domain_id': DEFAULT_DOMAIN_ID,
'password': 'two2',
'enabled': True,
- 'default_project_id': 'baz',
- 'tenants': ['baz'],
+ 'default_project_id': BAZ_TENANT_ID,
+ 'tenants': [BAZ_TENANT_ID],
'email': 'two@three.com',
}, {
'id': 'badguy',
@@ -78,8 +92,8 @@ USERS = [
'domain_id': DEFAULT_DOMAIN_ID,
'password': 'bad',
'enabled': False,
- 'default_project_id': 'baz',
- 'tenants': ['baz'],
+ 'default_project_id': BAZ_TENANT_ID,
+ 'tenants': [BAZ_TENANT_ID],
'email': 'bad@guy.com',
}, {
'id': 'sna',
@@ -87,7 +101,7 @@ USERS = [
'domain_id': DEFAULT_DOMAIN_ID,
'password': 'snafu',
'enabled': True,
- 'tenants': ['bar'],
+ 'tenants': [BAR_TENANT_ID],
'email': 'sna@snl.coom',
}
]
@@ -96,30 +110,45 @@ ROLES = [
{
'id': 'admin',
'name': 'admin',
+ 'domain_id': None,
}, {
'id': 'member',
'name': 'Member',
+ 'domain_id': None,
}, {
'id': '9fe2ff9ee4384b1894a90878d3e92bab',
'name': '_member_',
+ 'domain_id': None,
}, {
'id': 'other',
'name': 'Other',
+ 'domain_id': None,
}, {
'id': 'browser',
'name': 'Browser',
+ 'domain_id': None,
}, {
'id': 'writer',
'name': 'Writer',
+ 'domain_id': None,
}, {
'id': 'service',
'name': 'Service',
+ 'domain_id': None,
}
]
+# NOTE(morganfainberg): Admin assignment for replacing admin_token_auth
+ROLE_ASSIGNMENTS = [
+ {
+ 'user': 'reqadmin',
+ 'tenant_id': SERVICE_TENANT_ID,
+ 'role_id': 'admin'
+ },
+]
+
DOMAINS = [{'description':
- (u'Owns users and tenants (i.e. projects)'
- ' available on Identity API v2.'),
+ (u'The default domain'),
'enabled': True,
'id': DEFAULT_DOMAIN_ID,
'name': u'Default'}]
diff --git a/keystone-moon/keystone/tests/unit/external/README.rst b/keystone-moon/keystone/tests/unit/external/README.rst
new file mode 100644
index 00000000..e8f9fa65
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/external/README.rst
@@ -0,0 +1,9 @@
+This directory contains interface tests for external libraries. The goal
+is not to test every possible path through a library's code and get 100%
+coverage. It's to give us a level of confidence that their general interface
+remains the same through version upgrades.
+
+This gives us a place to put these tests without having to litter our
+own tests with assertions that are not directly related to the code
+under test. The expectations for the external library are all in one
+place so it makes it easier for us to find out what they are.
diff --git a/keystone-moon/keystone/tests/unit/external/__init__.py b/keystone-moon/keystone/tests/unit/external/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/external/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/external/test_timeutils.py b/keystone-moon/keystone/tests/unit/external/test_timeutils.py
new file mode 100644
index 00000000..7fc72d58
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/external/test_timeutils.py
@@ -0,0 +1,33 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+from oslo_utils import timeutils
+
+import keystone.tests.unit as tests
+
+
+class TestTimeUtils(tests.BaseTestCase):
+
+ def test_parsing_date_strings_returns_a_datetime(self):
+ example_date_str = '2015-09-23T04:45:37.196621Z'
+ dt = timeutils.parse_strtime(example_date_str, fmt=tests.TIME_FORMAT)
+ self.assertIsInstance(dt, datetime.datetime)
+
+ def test_parsing_invalid_date_strings_raises_a_ValueError(self):
+ example_date_str = ''
+ simple_format = '%Y'
+ self.assertRaises(ValueError,
+ timeutils.parse_strtime,
+ example_date_str,
+ fmt=simple_format)
diff --git a/keystone-moon/keystone/tests/unit/fakeldap.py b/keystone-moon/keystone/tests/unit/fakeldap.py
index 2f1ebe57..9ad1f218 100644
--- a/keystone-moon/keystone/tests/unit/fakeldap.py
+++ b/keystone-moon/keystone/tests/unit/fakeldap.py
@@ -18,10 +18,11 @@
This class does very little error checking, and knows nothing about ldap
class definitions. It implements the minimum emulation of the python ldap
-library to work with nova.
+library to work with keystone.
"""
+import random
import re
import shelve
@@ -67,7 +68,13 @@ def _internal_attr(attr_name, value_or_values):
if dn == 'cn=Doe\\, John,ou=Users,cn=example,cn=com':
return 'CN=Doe\\2C John,OU=Users,CN=example,CN=com'
- dn = ldap.dn.str2dn(core.utf8_encode(dn))
+ try:
+ dn = ldap.dn.str2dn(core.utf8_encode(dn))
+ except ldap.DECODING_ERROR:
+ # NOTE(amakarov): In case of IDs instead of DNs in group members
+ # they must be handled as regular values.
+ return normalize_value(dn)
+
norm = []
for part in dn:
name, val, i = part[0]
@@ -132,7 +139,6 @@ def _paren_groups(source):
def _match(key, value, attrs):
"""Match a given key and value against an attribute list."""
-
def match_with_wildcards(norm_val, val_list):
# Case insensitive checking with wildcards
if norm_val.startswith('*'):
@@ -209,6 +215,7 @@ class FakeShelve(dict):
FakeShelves = {}
+PendingRequests = {}
class FakeLdap(core.LDAPHandler):
@@ -534,18 +541,60 @@ class FakeLdap(core.LDAPHandler):
self._ldap_options[option] = invalue
def get_option(self, option):
- value = self._ldap_options.get(option, None)
+ value = self._ldap_options.get(option)
return value
def search_ext(self, base, scope,
filterstr='(objectClass=*)', attrlist=None, attrsonly=0,
serverctrls=None, clientctrls=None,
timeout=-1, sizelimit=0):
- raise exception.NotImplemented()
+ if clientctrls is not None or timeout != -1 or sizelimit != 0:
+ raise exception.NotImplemented()
+
+ # only passing a single server control is supported by this fake ldap
+ if len(serverctrls) > 1:
+ raise exception.NotImplemented()
+
+ # search_ext is async and returns an identifier used for
+ # retrieving the results via result3(). This will be emulated by
+ # storing the request in a variable with random integer key and
+ # performing the real lookup in result3()
+ msgid = random.randint(0, 1000)
+ PendingRequests[msgid] = (base, scope, filterstr, attrlist, attrsonly,
+ serverctrls)
+ return msgid
def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None,
resp_ctrl_classes=None):
- raise exception.NotImplemented()
+ """Execute async request
+
+ Only msgid param is supported. Request info is fetched from global
+ variable `PendingRequests` by msgid, executed using search_s and
+ limited if requested.
+ """
+ if all != 1 or timeout is not None or resp_ctrl_classes is not None:
+ raise exception.NotImplemented()
+
+ params = PendingRequests[msgid]
+ # search_s accepts a subset of parameters of search_ext,
+ # that's why we use only the first 5.
+ results = self.search_s(*params[:5])
+
+ # extract limit from serverctrl
+ serverctrls = params[5]
+ ctrl = serverctrls[0]
+
+ if ctrl.size:
+ rdata = results[:ctrl.size]
+ else:
+ rdata = results
+
+ # real result3 returns various service info -- rtype, rmsgid,
+ # serverctrls. Now this info is not used, so all this info is None
+ rtype = None
+ rmsgid = None
+ serverctrls = None
+ return (rtype, rdata, rmsgid, serverctrls)
class FakeLdapPool(FakeLdap):
diff --git a/keystone-moon/keystone/tests/unit/filtering.py b/keystone-moon/keystone/tests/unit/filtering.py
index 93e0bc28..59301299 100644
--- a/keystone-moon/keystone/tests/unit/filtering.py
+++ b/keystone-moon/keystone/tests/unit/filtering.py
@@ -49,7 +49,6 @@ class FilterTests(object):
one.
"""
-
f = getattr(self.identity_api, 'create_%s' % entity_type, None)
if f is None:
f = getattr(self.resource_api, 'create_%s' % entity_type, None)
@@ -65,7 +64,6 @@ class FilterTests(object):
one.
"""
-
f = getattr(self.identity_api, 'delete_%s' % entity_type, None)
if f is None:
f = getattr(self.resource_api, 'delete_%s' % entity_type, None)
@@ -81,7 +79,6 @@ class FilterTests(object):
one.
"""
-
f = getattr(self.identity_api, 'list_%ss' % entity_type, None)
if f is None:
f = getattr(self.resource_api, 'list_%ss' % entity_type, None)
diff --git a/keystone-moon/keystone/tests/unit/identity/test_backends.py b/keystone-moon/keystone/tests/unit/identity/test_backends.py
new file mode 100644
index 00000000..8b5c0def
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/identity/test_backends.py
@@ -0,0 +1,1297 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+import mock
+from oslo_config import cfg
+from six.moves import range
+from testtools import matchers
+
+from keystone.common import driver_hints
+from keystone import exception
+from keystone.tests import unit
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit import filtering
+
+
+CONF = cfg.CONF
+
+
+class IdentityTests(object):
+
+ def _get_domain_fixture(self):
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ return domain
+
+ def _set_domain_scope(self, domain_id):
+ # We only provide a domain scope if we have multiple drivers
+ if CONF.identity.domain_specific_drivers_enabled:
+ return domain_id
+
+ def test_authenticate_bad_user(self):
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=uuid.uuid4().hex,
+ password=self.user_foo['password'])
+
+ def test_authenticate_bad_password(self):
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=self.user_foo['id'],
+ password=uuid.uuid4().hex)
+
+ def test_authenticate(self):
+ user_ref = self.identity_api.authenticate(
+ context={},
+ user_id=self.user_sna['id'],
+ password=self.user_sna['password'])
+ # NOTE(termie): the password field is left in user_sna to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ self.user_sna.pop('password')
+ self.user_sna['enabled'] = True
+ self.assertDictEqual(self.user_sna, user_ref)
+
+ def test_authenticate_and_get_roles_no_metadata(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+
+ # Remove user id. It is ignored by create_user() and will break the
+ # subset test below.
+ del user['id']
+
+ new_user = self.identity_api.create_user(user)
+ self.assignment_api.add_user_to_project(self.tenant_baz['id'],
+ new_user['id'])
+ user_ref = self.identity_api.authenticate(
+ context={},
+ user_id=new_user['id'],
+ password=user['password'])
+ self.assertNotIn('password', user_ref)
+ # NOTE(termie): the password field is left in user_sna to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ user.pop('password')
+ self.assertDictContainsSubset(user, user_ref)
+ role_list = self.assignment_api.get_roles_for_user_and_project(
+ new_user['id'], self.tenant_baz['id'])
+ self.assertEqual(1, len(role_list))
+ self.assertIn(CONF.member_role_id, role_list)
+
+ def test_authenticate_if_no_password_set(self):
+ id_ = uuid.uuid4().hex
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ self.identity_api.create_user(user)
+
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=id_,
+ password='password')
+
+ def test_create_unicode_user_name(self):
+ unicode_name = u'name \u540d\u5b57'
+ user = unit.new_user_ref(name=unicode_name,
+ domain_id=CONF.identity.default_domain_id)
+ ref = self.identity_api.create_user(user)
+ self.assertEqual(unicode_name, ref['name'])
+
+ def test_get_user(self):
+ user_ref = self.identity_api.get_user(self.user_foo['id'])
+ # NOTE(termie): the password field is left in user_foo to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ self.user_foo.pop('password')
+ self.assertDictEqual(self.user_foo, user_ref)
+
+ @unit.skip_if_cache_disabled('identity')
+ def test_cache_layer_get_user(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ self.identity_api.create_user(user)
+ ref = self.identity_api.get_user_by_name(user['name'],
+ user['domain_id'])
+ # cache the result.
+ self.identity_api.get_user(ref['id'])
+ # delete bypassing identity api
+ domain_id, driver, entity_id = (
+ self.identity_api._get_domain_driver_and_entity_id(ref['id']))
+ driver.delete_user(entity_id)
+
+ self.assertDictEqual(ref, self.identity_api.get_user(ref['id']))
+ self.identity_api.get_user.invalidate(self.identity_api, ref['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user, ref['id'])
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ ref = self.identity_api.get_user_by_name(user['name'],
+ user['domain_id'])
+ user['description'] = uuid.uuid4().hex
+ # cache the result.
+ self.identity_api.get_user(ref['id'])
+ # update using identity api and get back updated user.
+ user_updated = self.identity_api.update_user(ref['id'], user)
+ self.assertDictContainsSubset(self.identity_api.get_user(ref['id']),
+ user_updated)
+ self.assertDictContainsSubset(
+ self.identity_api.get_user_by_name(ref['name'], ref['domain_id']),
+ user_updated)
+
+ def test_get_user_returns_not_found(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ uuid.uuid4().hex)
+
+ def test_get_user_by_name(self):
+ user_ref = self.identity_api.get_user_by_name(
+ self.user_foo['name'], CONF.identity.default_domain_id)
+ # NOTE(termie): the password field is left in user_foo to make
+ # it easier to authenticate in tests, but should
+ # not be returned by the api
+ self.user_foo.pop('password')
+ self.assertDictEqual(self.user_foo, user_ref)
+
+ @unit.skip_if_cache_disabled('identity')
+ def test_cache_layer_get_user_by_name(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ self.identity_api.create_user(user)
+ ref = self.identity_api.get_user_by_name(user['name'],
+ user['domain_id'])
+ # delete bypassing the identity api.
+ domain_id, driver, entity_id = (
+ self.identity_api._get_domain_driver_and_entity_id(ref['id']))
+ driver.delete_user(entity_id)
+
+ self.assertDictEqual(ref, self.identity_api.get_user_by_name(
+ user['name'], CONF.identity.default_domain_id))
+ self.identity_api.get_user_by_name.invalidate(
+ self.identity_api, user['name'], CONF.identity.default_domain_id)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user_by_name,
+ user['name'], CONF.identity.default_domain_id)
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ ref = self.identity_api.get_user_by_name(user['name'],
+ user['domain_id'])
+ user['description'] = uuid.uuid4().hex
+ user_updated = self.identity_api.update_user(ref['id'], user)
+ self.assertDictContainsSubset(self.identity_api.get_user(ref['id']),
+ user_updated)
+ self.assertDictContainsSubset(
+ self.identity_api.get_user_by_name(ref['name'], ref['domain_id']),
+ user_updated)
+
+ def test_get_user_by_name_returns_not_found(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user_by_name,
+ uuid.uuid4().hex,
+ CONF.identity.default_domain_id)
+
+ def test_create_duplicate_user_name_fails(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_user,
+ user)
+
+ def test_create_duplicate_user_name_in_different_domains(self):
+ new_domain = unit.new_domain_ref()
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ user1 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+
+ user2 = unit.new_user_ref(name=user1['name'],
+ domain_id=new_domain['id'])
+
+ self.identity_api.create_user(user1)
+ self.identity_api.create_user(user2)
+
+ def test_move_user_between_domains(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+ user = unit.new_user_ref(domain_id=domain1['id'])
+ user = self.identity_api.create_user(user)
+ user['domain_id'] = domain2['id']
+ # Update the user asserting that a deprecation warning is emitted
+ with mock.patch(
+ 'oslo_log.versionutils.report_deprecated_feature') as mock_dep:
+ self.identity_api.update_user(user['id'], user)
+ self.assertTrue(mock_dep.called)
+
+ updated_user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(domain2['id'], updated_user_ref['domain_id'])
+
+ def test_move_user_between_domains_with_clashing_names_fails(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+ # First, create a user in domain1
+ user1 = unit.new_user_ref(domain_id=domain1['id'])
+ user1 = self.identity_api.create_user(user1)
+ # Now create a user in domain2 with a potentially clashing
+ # name - which should work since we have domain separation
+ user2 = unit.new_user_ref(name=user1['name'],
+ domain_id=domain2['id'])
+ user2 = self.identity_api.create_user(user2)
+ # Now try and move user1 into the 2nd domain - which should
+ # fail since the names clash
+ user1['domain_id'] = domain2['id']
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_user,
+ user1['id'],
+ user1)
+
+ def test_rename_duplicate_user_name_fails(self):
+ user1 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user2 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ self.identity_api.create_user(user1)
+ user2 = self.identity_api.create_user(user2)
+ user2['name'] = user1['name']
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_user,
+ user2['id'],
+ user2)
+
+ def test_update_user_id_fails(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ original_id = user['id']
+ user['id'] = 'fake2'
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ original_id,
+ user)
+ user_ref = self.identity_api.get_user(original_id)
+ self.assertEqual(original_id, user_ref['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ 'fake2')
+
+ def test_delete_user_with_group_project_domain_links(self):
+ role1 = unit.new_role_ref()
+ self.role_api.create_role(role1['id'], role1)
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ project1 = unit.new_project_ref(domain_id=domain1['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ user1 = unit.new_user_ref(domain_id=domain1['id'])
+ user1 = self.identity_api.create_user(user1)
+ group1 = unit.new_group_ref(domain_id=domain1['id'])
+ group1 = self.identity_api.create_group(group1)
+ self.assignment_api.create_grant(user_id=user1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(user_id=user1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ self.identity_api.add_user_to_group(user_id=user1['id'],
+ group_id=group1['id'])
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ project_id=project1['id'])
+ self.assertEqual(1, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ user_id=user1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(1, len(roles_ref))
+ self.identity_api.check_user_in_group(
+ user_id=user1['id'],
+ group_id=group1['id'])
+ self.identity_api.delete_user(user1['id'])
+ self.assertRaises(exception.NotFound,
+ self.identity_api.check_user_in_group,
+ user1['id'],
+ group1['id'])
+
+ def test_delete_group_with_user_project_domain_links(self):
+ role1 = unit.new_role_ref()
+ self.role_api.create_role(role1['id'], role1)
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ project1 = unit.new_project_ref(domain_id=domain1['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ user1 = unit.new_user_ref(domain_id=domain1['id'])
+ user1 = self.identity_api.create_user(user1)
+ group1 = unit.new_group_ref(domain_id=domain1['id'])
+ group1 = self.identity_api.create_group(group1)
+
+ self.assignment_api.create_grant(group_id=group1['id'],
+ project_id=project1['id'],
+ role_id=role1['id'])
+ self.assignment_api.create_grant(group_id=group1['id'],
+ domain_id=domain1['id'],
+ role_id=role1['id'])
+ self.identity_api.add_user_to_group(user_id=user1['id'],
+ group_id=group1['id'])
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ project_id=project1['id'])
+ self.assertEqual(1, len(roles_ref))
+ roles_ref = self.assignment_api.list_grants(
+ group_id=group1['id'],
+ domain_id=domain1['id'])
+ self.assertEqual(1, len(roles_ref))
+ self.identity_api.check_user_in_group(
+ user_id=user1['id'],
+ group_id=group1['id'])
+ self.identity_api.delete_group(group1['id'])
+ self.identity_api.get_user(user1['id'])
+
+ def test_update_user_returns_not_found(self):
+ user_id = uuid.uuid4().hex
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.update_user,
+ user_id,
+ {'id': user_id,
+ 'domain_id': CONF.identity.default_domain_id})
+
+ def test_delete_user_returns_not_found(self):
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.delete_user,
+ uuid.uuid4().hex)
+
+ def test_create_user_long_name_fails(self):
+ user = unit.new_user_ref(name='a' * 256,
+ domain_id=CONF.identity.default_domain_id)
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ def test_create_user_blank_name_fails(self):
+ user = unit.new_user_ref(name='',
+ domain_id=CONF.identity.default_domain_id)
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ def test_create_user_missed_password(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ self.identity_api.get_user(user['id'])
+ # Make sure the user is not allowed to login
+ # with a password that is empty string or None
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=user['id'],
+ password='')
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=user['id'],
+ password=None)
+
+ def test_create_user_none_password(self):
+ user = unit.new_user_ref(password=None,
+ domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ self.identity_api.get_user(user['id'])
+ # Make sure the user is not allowed to login
+ # with a password that is empty string or None
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=user['id'],
+ password='')
+ self.assertRaises(AssertionError,
+ self.identity_api.authenticate,
+ context={},
+ user_id=user['id'],
+ password=None)
+
+ def test_create_user_invalid_name_fails(self):
+ user = unit.new_user_ref(name=None,
+ domain_id=CONF.identity.default_domain_id)
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ user = unit.new_user_ref(name=123,
+ domain_id=CONF.identity.default_domain_id)
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ def test_create_user_invalid_enabled_type_string(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id,
+ # invalid string value
+ enabled='true')
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.create_user,
+ user)
+
+ def test_update_user_long_name_fails(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ user['name'] = 'a' * 256
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ def test_update_user_blank_name_fails(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ user['name'] = ''
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ def test_update_user_invalid_name_fails(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+
+ user['name'] = None
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ user['name'] = 123
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ def test_list_users(self):
+ users = self.identity_api.list_users(
+ domain_scope=self._set_domain_scope(
+ CONF.identity.default_domain_id))
+ self.assertEqual(len(default_fixtures.USERS), len(users))
+ user_ids = set(user['id'] for user in users)
+ expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
+ for user in default_fixtures.USERS)
+ for user_ref in users:
+ self.assertNotIn('password', user_ref)
+ self.assertEqual(expected_user_ids, user_ids)
+
+ def test_list_groups(self):
+ group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group2 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group1 = self.identity_api.create_group(group1)
+ group2 = self.identity_api.create_group(group2)
+ groups = self.identity_api.list_groups(
+ domain_scope=self._set_domain_scope(
+ CONF.identity.default_domain_id))
+ self.assertEqual(2, len(groups))
+ group_ids = []
+ for group in groups:
+ group_ids.append(group.get('id'))
+ self.assertIn(group1['id'], group_ids)
+ self.assertIn(group2['id'], group_ids)
+
+ def test_create_user_doesnt_modify_passed_in_dict(self):
+ new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ original_user = new_user.copy()
+ self.identity_api.create_user(new_user)
+ self.assertDictEqual(original_user, new_user)
+
+ def test_update_user_enable(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertTrue(user_ref['enabled'])
+
+ user['enabled'] = False
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(user['enabled'], user_ref['enabled'])
+
+ # If not present, enabled field should not be updated
+ del user['enabled']
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertFalse(user_ref['enabled'])
+
+ user['enabled'] = True
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(user['enabled'], user_ref['enabled'])
+
+ del user['enabled']
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertTrue(user_ref['enabled'])
+
+ # Integers are valid Python's booleans. Explicitly test it.
+ user['enabled'] = 0
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertFalse(user_ref['enabled'])
+
+ # Any integers other than 0 are interpreted as True
+ user['enabled'] = -42
+ self.identity_api.update_user(user['id'], user)
+ user_ref = self.identity_api.get_user(user['id'])
+ # NOTE(breton): below, attribute `enabled` is explicitly tested to be
+ # equal True. assertTrue should not be used, because it converts
+ # the passed value to bool().
+ self.assertIs(user_ref['enabled'], True)
+
+ def test_update_user_name(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertEqual(user['name'], user_ref['name'])
+
+ changed_name = user_ref['name'] + '_changed'
+ user_ref['name'] = changed_name
+ updated_user = self.identity_api.update_user(user_ref['id'], user_ref)
+
+ # NOTE(dstanek): the SQL backend adds an 'extra' field containing a
+ # dictionary of the extra fields in addition to the
+ # fields in the object. For the details see:
+ # SqlIdentity.test_update_project_returns_extra
+ updated_user.pop('extra', None)
+
+ self.assertDictEqual(user_ref, updated_user)
+
+ user_ref = self.identity_api.get_user(user_ref['id'])
+ self.assertEqual(changed_name, user_ref['name'])
+
+ def test_update_user_enable_fails(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertTrue(user_ref['enabled'])
+
+ # Strings are not valid boolean values
+ user['enabled'] = 'false'
+ self.assertRaises(exception.ValidationError,
+ self.identity_api.update_user,
+ user['id'],
+ user)
+
+ def test_add_user_to_group(self):
+ domain = self._get_domain_fixture()
+ new_group = unit.new_group_ref(domain_id=domain['id'])
+ new_group = self.identity_api.create_group(new_group)
+ new_user = unit.new_user_ref(domain_id=domain['id'])
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ groups = self.identity_api.list_groups_for_user(new_user['id'])
+
+ found = False
+ for x in groups:
+ if (x['id'] == new_group['id']):
+ found = True
+ self.assertTrue(found)
+
+ def test_add_user_to_group_returns_not_found(self):
+ domain = self._get_domain_fixture()
+ new_user = unit.new_user_ref(domain_id=domain['id'])
+ new_user = self.identity_api.create_user(new_user)
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.add_user_to_group,
+ new_user['id'],
+ uuid.uuid4().hex)
+
+ new_group = unit.new_group_ref(domain_id=domain['id'])
+ new_group = self.identity_api.create_group(new_group)
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.add_user_to_group,
+ uuid.uuid4().hex,
+ new_group['id'])
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.add_user_to_group,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex)
+
+ def test_check_user_in_group(self):
+ domain = self._get_domain_fixture()
+ new_group = unit.new_group_ref(domain_id=domain['id'])
+ new_group = self.identity_api.create_group(new_group)
+ new_user = unit.new_user_ref(domain_id=domain['id'])
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ self.identity_api.check_user_in_group(new_user['id'], new_group['id'])
+
+ def test_check_user_not_in_group(self):
+ new_group = unit.new_group_ref(
+ domain_id=CONF.identity.default_domain_id)
+ new_group = self.identity_api.create_group(new_group)
+
+ new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ new_user = self.identity_api.create_user(new_user)
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.check_user_in_group,
+ new_user['id'],
+ new_group['id'])
+
+ def test_check_user_in_group_returns_not_found(self):
+ new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ new_user = self.identity_api.create_user(new_user)
+
+ new_group = unit.new_group_ref(
+ domain_id=CONF.identity.default_domain_id)
+ new_group = self.identity_api.create_group(new_group)
+
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.check_user_in_group,
+ uuid.uuid4().hex,
+ new_group['id'])
+
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.check_user_in_group,
+ new_user['id'],
+ uuid.uuid4().hex)
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.check_user_in_group,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex)
+
+ def test_list_users_in_group(self):
+ domain = self._get_domain_fixture()
+ new_group = unit.new_group_ref(domain_id=domain['id'])
+ new_group = self.identity_api.create_group(new_group)
+ # Make sure we get an empty list back on a new group, not an error.
+ user_refs = self.identity_api.list_users_in_group(new_group['id'])
+ self.assertEqual([], user_refs)
+ # Make sure we get the correct users back once they have been added
+ # to the group.
+ new_user = unit.new_user_ref(domain_id=domain['id'])
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ user_refs = self.identity_api.list_users_in_group(new_group['id'])
+ found = False
+ for x in user_refs:
+ if (x['id'] == new_user['id']):
+ found = True
+ self.assertNotIn('password', x)
+ self.assertTrue(found)
+
+ def test_list_users_in_group_returns_not_found(self):
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.list_users_in_group,
+ uuid.uuid4().hex)
+
+ def test_list_groups_for_user(self):
+ domain = self._get_domain_fixture()
+ test_groups = []
+ test_users = []
+ GROUP_COUNT = 3
+ USER_COUNT = 2
+
+ for x in range(0, USER_COUNT):
+ new_user = unit.new_user_ref(domain_id=domain['id'])
+ new_user = self.identity_api.create_user(new_user)
+ test_users.append(new_user)
+ positive_user = test_users[0]
+ negative_user = test_users[1]
+
+ for x in range(0, USER_COUNT):
+ group_refs = self.identity_api.list_groups_for_user(
+ test_users[x]['id'])
+ self.assertEqual(0, len(group_refs))
+
+ for x in range(0, GROUP_COUNT):
+ before_count = x
+ after_count = x + 1
+ new_group = unit.new_group_ref(domain_id=domain['id'])
+ new_group = self.identity_api.create_group(new_group)
+ test_groups.append(new_group)
+
+ # add the user to the group and ensure that the
+ # group count increases by one for each
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(before_count, len(group_refs))
+ self.identity_api.add_user_to_group(
+ positive_user['id'],
+ new_group['id'])
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(after_count, len(group_refs))
+
+ # Make sure the group count for the unrelated user did not change
+ group_refs = self.identity_api.list_groups_for_user(
+ negative_user['id'])
+ self.assertEqual(0, len(group_refs))
+
+ # remove the user from each group and ensure that
+ # the group count reduces by one for each
+ for x in range(0, 3):
+ before_count = GROUP_COUNT - x
+ after_count = GROUP_COUNT - x - 1
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(before_count, len(group_refs))
+ self.identity_api.remove_user_from_group(
+ positive_user['id'],
+ test_groups[x]['id'])
+ group_refs = self.identity_api.list_groups_for_user(
+ positive_user['id'])
+ self.assertEqual(after_count, len(group_refs))
+ # Make sure the group count for the unrelated user
+ # did not change
+ group_refs = self.identity_api.list_groups_for_user(
+ negative_user['id'])
+ self.assertEqual(0, len(group_refs))
+
+ def test_remove_user_from_group(self):
+ domain = self._get_domain_fixture()
+ new_group = unit.new_group_ref(domain_id=domain['id'])
+ new_group = self.identity_api.create_group(new_group)
+ new_user = unit.new_user_ref(domain_id=domain['id'])
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+ groups = self.identity_api.list_groups_for_user(new_user['id'])
+ self.assertIn(new_group['id'], [x['id'] for x in groups])
+ self.identity_api.remove_user_from_group(new_user['id'],
+ new_group['id'])
+ groups = self.identity_api.list_groups_for_user(new_user['id'])
+ self.assertNotIn(new_group['id'], [x['id'] for x in groups])
+
+ def test_remove_user_from_group_returns_not_found(self):
+ domain = self._get_domain_fixture()
+ new_user = unit.new_user_ref(domain_id=domain['id'])
+ new_user = self.identity_api.create_user(new_user)
+ new_group = unit.new_group_ref(domain_id=domain['id'])
+ new_group = self.identity_api.create_group(new_group)
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.remove_user_from_group,
+ new_user['id'],
+ uuid.uuid4().hex)
+
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.remove_user_from_group,
+ uuid.uuid4().hex,
+ new_group['id'])
+
+ self.assertRaises(exception.NotFound,
+ self.identity_api.remove_user_from_group,
+ uuid.uuid4().hex,
+ uuid.uuid4().hex)
+
+ def test_group_crud(self):
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ group = unit.new_group_ref(domain_id=domain['id'])
+ group = self.identity_api.create_group(group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictContainsSubset(group, group_ref)
+
+ group['name'] = uuid.uuid4().hex
+ self.identity_api.update_group(group['id'], group)
+ group_ref = self.identity_api.get_group(group['id'])
+ self.assertDictContainsSubset(group, group_ref)
+
+ self.identity_api.delete_group(group['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group,
+ group['id'])
+
+ def test_get_group_by_name(self):
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group_name = group['name']
+ group = self.identity_api.create_group(group)
+ spoiler = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ self.identity_api.create_group(spoiler)
+
+ group_ref = self.identity_api.get_group_by_name(
+ group_name, CONF.identity.default_domain_id)
+ self.assertDictEqual(group, group_ref)
+
+ def test_get_group_by_name_returns_not_found(self):
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group_by_name,
+ uuid.uuid4().hex,
+ CONF.identity.default_domain_id)
+
+ @unit.skip_if_cache_disabled('identity')
+ def test_cache_layer_group_crud(self):
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group = self.identity_api.create_group(group)
+ # cache the result
+ group_ref = self.identity_api.get_group(group['id'])
+ # delete the group bypassing identity api.
+ domain_id, driver, entity_id = (
+ self.identity_api._get_domain_driver_and_entity_id(group['id']))
+ driver.delete_group(entity_id)
+
+ self.assertEqual(group_ref, self.identity_api.get_group(group['id']))
+ self.identity_api.get_group.invalidate(self.identity_api, group['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group, group['id'])
+
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group = self.identity_api.create_group(group)
+ # cache the result
+ self.identity_api.get_group(group['id'])
+ group['name'] = uuid.uuid4().hex
+ group_ref = self.identity_api.update_group(group['id'], group)
+ # after updating through identity api, get updated group
+ self.assertDictContainsSubset(self.identity_api.get_group(group['id']),
+ group_ref)
+
+ def test_create_duplicate_group_name_fails(self):
+ group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group2 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id,
+ name=group1['name'])
+ group1 = self.identity_api.create_group(group1)
+ self.assertRaises(exception.Conflict,
+ self.identity_api.create_group,
+ group2)
+
+ def test_create_duplicate_group_name_in_different_domains(self):
+ new_domain = unit.new_domain_ref()
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group2 = unit.new_group_ref(domain_id=new_domain['id'],
+ name=group1['name'])
+ group1 = self.identity_api.create_group(group1)
+ group2 = self.identity_api.create_group(group2)
+
+ def test_move_group_between_domains(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+ group = unit.new_group_ref(domain_id=domain1['id'])
+ group = self.identity_api.create_group(group)
+ group['domain_id'] = domain2['id']
+ # Update the group asserting that a deprecation warning is emitted
+ with mock.patch(
+ 'oslo_log.versionutils.report_deprecated_feature') as mock_dep:
+ self.identity_api.update_group(group['id'], group)
+ self.assertTrue(mock_dep.called)
+
+ updated_group_ref = self.identity_api.get_group(group['id'])
+ self.assertEqual(domain2['id'], updated_group_ref['domain_id'])
+
+ def test_move_group_between_domains_with_clashing_names_fails(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+ # First, create a group in domain1
+ group1 = unit.new_group_ref(domain_id=domain1['id'])
+ group1 = self.identity_api.create_group(group1)
+ # Now create a group in domain2 with a potentially clashing
+ # name - which should work since we have domain separation
+ group2 = unit.new_group_ref(name=group1['name'],
+ domain_id=domain2['id'])
+ group2 = self.identity_api.create_group(group2)
+ # Now try and move group1 into the 2nd domain - which should
+ # fail since the names clash
+ group1['domain_id'] = domain2['id']
+ self.assertRaises(exception.Conflict,
+ self.identity_api.update_group,
+ group1['id'],
+ group1)
+
+ def test_user_crud(self):
+ user_dict = unit.new_user_ref(
+ domain_id=CONF.identity.default_domain_id)
+ del user_dict['id']
+ user = self.identity_api.create_user(user_dict)
+ user_ref = self.identity_api.get_user(user['id'])
+ del user_dict['password']
+ user_ref_dict = {x: user_ref[x] for x in user_ref}
+ self.assertDictContainsSubset(user_dict, user_ref_dict)
+
+ user_dict['password'] = uuid.uuid4().hex
+ self.identity_api.update_user(user['id'], user_dict)
+ user_ref = self.identity_api.get_user(user['id'])
+ del user_dict['password']
+ user_ref_dict = {x: user_ref[x] for x in user_ref}
+ self.assertDictContainsSubset(user_dict, user_ref_dict)
+
+ self.identity_api.delete_user(user['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user['id'])
+
+ def test_arbitrary_attributes_are_returned_from_create_user(self):
+ attr_value = uuid.uuid4().hex
+ user_data = unit.new_user_ref(
+ domain_id=CONF.identity.default_domain_id,
+ arbitrary_attr=attr_value)
+
+ user = self.identity_api.create_user(user_data)
+
+ self.assertEqual(attr_value, user['arbitrary_attr'])
+
+ def test_arbitrary_attributes_are_returned_from_get_user(self):
+ attr_value = uuid.uuid4().hex
+ user_data = unit.new_user_ref(
+ domain_id=CONF.identity.default_domain_id,
+ arbitrary_attr=attr_value)
+
+ user_data = self.identity_api.create_user(user_data)
+
+ user = self.identity_api.get_user(user_data['id'])
+ self.assertEqual(attr_value, user['arbitrary_attr'])
+
+ def test_new_arbitrary_attributes_are_returned_from_update_user(self):
+ user_data = unit.new_user_ref(
+ domain_id=CONF.identity.default_domain_id)
+
+ user = self.identity_api.create_user(user_data)
+ attr_value = uuid.uuid4().hex
+ user['arbitrary_attr'] = attr_value
+ updated_user = self.identity_api.update_user(user['id'], user)
+
+ self.assertEqual(attr_value, updated_user['arbitrary_attr'])
+
+ def test_updated_arbitrary_attributes_are_returned_from_update_user(self):
+ attr_value = uuid.uuid4().hex
+ user_data = unit.new_user_ref(
+ domain_id=CONF.identity.default_domain_id,
+ arbitrary_attr=attr_value)
+
+ new_attr_value = uuid.uuid4().hex
+ user = self.identity_api.create_user(user_data)
+ user['arbitrary_attr'] = new_attr_value
+ updated_user = self.identity_api.update_user(user['id'], user)
+
+ self.assertEqual(new_attr_value, updated_user['arbitrary_attr'])
+
+ def test_user_update_and_user_get_return_same_response(self):
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+
+ user = self.identity_api.create_user(user)
+
+ updated_user = {'enabled': False}
+ updated_user_ref = self.identity_api.update_user(
+ user['id'], updated_user)
+
+ # SQL backend adds 'extra' field
+ updated_user_ref.pop('extra', None)
+
+ self.assertIs(False, updated_user_ref['enabled'])
+
+ user_ref = self.identity_api.get_user(user['id'])
+ self.assertDictEqual(updated_user_ref, user_ref)
+
+
+class FilterTests(filtering.FilterTests):
+ def test_list_entities_filtered(self):
+ for entity in ['user', 'group', 'project']:
+ # Create 20 entities
+ entity_list = self._create_test_data(entity, 20)
+
+ # Try filtering to get an exact item out of the list
+ hints = driver_hints.Hints()
+ hints.add_filter('name', entity_list[10]['name'])
+ entities = self._list_entities(entity)(hints=hints)
+ self.assertEqual(1, len(entities))
+ self.assertEqual(entity_list[10]['id'], entities[0]['id'])
+ # Check the driver has removed the filter from the list hints
+ self.assertFalse(hints.get_exact_filter_by_name('name'))
+ self._delete_test_data(entity, entity_list)
+
+ def test_list_users_inexact_filtered(self):
+ # Create 20 users, some with specific names. We set the names at create
+ # time (rather than updating them), since the LDAP driver does not
+ # support name updates.
+ user_name_data = {
+ # user index: name for user
+ 5: 'The',
+ 6: 'The Ministry',
+ 7: 'The Ministry of',
+ 8: 'The Ministry of Silly',
+ 9: 'The Ministry of Silly Walks',
+ # ...and one for useful case insensitivity testing
+ 10: 'The ministry of silly walks OF'
+ }
+ user_list = self._create_test_data(
+ 'user', 20, domain_id=CONF.identity.default_domain_id,
+ name_dict=user_name_data)
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'ministry', comparator='contains')
+ users = self.identity_api.list_users(hints=hints)
+ self.assertEqual(5, len(users))
+ self._match_with_list(users, user_list,
+ list_start=6, list_end=11)
+ # TODO(henry-nash) Check inexact filter has been removed.
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'The', comparator='startswith')
+ users = self.identity_api.list_users(hints=hints)
+ self.assertEqual(6, len(users))
+ self._match_with_list(users, user_list,
+ list_start=5, list_end=11)
+ # TODO(henry-nash) Check inexact filter has been removed.
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'of', comparator='endswith')
+ users = self.identity_api.list_users(hints=hints)
+ self.assertEqual(2, len(users))
+ # We can't assume we will get back the users in any particular order
+ self.assertIn(user_list[7]['id'], [users[0]['id'], users[1]['id']])
+ self.assertIn(user_list[10]['id'], [users[0]['id'], users[1]['id']])
+ # TODO(henry-nash) Check inexact filter has been removed.
+
+ # TODO(henry-nash): Add some case sensitive tests. However,
+ # these would be hard to validate currently, since:
+ #
+ # For SQL, the issue is that MySQL 0.7, by default, is installed in
+ # case insensitive mode (which is what is run by default for our
+ # SQL backend tests). For production deployments, OpenStack
+ # assumes a case sensitive database. For these tests, therefore, we
+ # need to be able to check the sensitivity of the database so as to
+ # know whether to run case sensitive tests here.
+ #
+ # For LDAP/AD, although dependent on the schema being used, attributes
+ # are typically configured to be case aware, but not case sensitive.
+
+ self._delete_test_data('user', user_list)
+
+ def _groups_for_user_data(self):
+ number_of_groups = 10
+ group_name_data = {
+ # entity index: name for entity
+ 5: 'The',
+ 6: 'The Ministry',
+ 9: 'The Ministry of Silly Walks',
+ }
+ group_list = self._create_test_data(
+ 'group', number_of_groups,
+ domain_id=CONF.identity.default_domain_id,
+ name_dict=group_name_data)
+ user_list = self._create_test_data('user', 2)
+
+ for group in range(7):
+ # Create membership, including with two out of the three groups
+ # with well known names
+ self.identity_api.add_user_to_group(user_list[0]['id'],
+ group_list[group]['id'])
+ # ...and some spoiler memberships
+ for group in range(7, number_of_groups):
+ self.identity_api.add_user_to_group(user_list[1]['id'],
+ group_list[group]['id'])
+
+ return group_list, user_list
+
+ def test_groups_for_user_inexact_filtered(self):
+ """Test use of filtering doesn't break groups_for_user listing.
+
+ Some backends may use filtering to achieve the list of groups for a
+ user, so test that it can combine a second filter.
+
+ Test Plan:
+
+ - Create 10 groups, some with names we can filter on
+ - Create 2 users
+ - Assign 1 of those users to most of the groups, including some of the
+ well known named ones
+ - Assign the other user to other groups as spoilers
+ - Ensure that when we list groups for users with a filter on the group
+ name, both restrictions have been enforced on what is returned.
+
+ """
+ group_list, user_list = self._groups_for_user_data()
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'Ministry', comparator='contains')
+ groups = self.identity_api.list_groups_for_user(
+ user_list[0]['id'], hints=hints)
+ # We should only get back one group, since of the two that contain
+ # 'Ministry' the user only belongs to one.
+ self.assertThat(len(groups), matchers.Equals(1))
+ self.assertEqual(group_list[6]['id'], groups[0]['id'])
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'The', comparator='startswith')
+ groups = self.identity_api.list_groups_for_user(
+ user_list[0]['id'], hints=hints)
+ # We should only get back 2 out of the 3 groups that start with 'The'
+ # hence showing that both "filters" have been applied
+ self.assertThat(len(groups), matchers.Equals(2))
+ self.assertIn(group_list[5]['id'], [groups[0]['id'], groups[1]['id']])
+ self.assertIn(group_list[6]['id'], [groups[0]['id'], groups[1]['id']])
+
+ hints.add_filter('name', 'The', comparator='endswith')
+ groups = self.identity_api.list_groups_for_user(
+ user_list[0]['id'], hints=hints)
+ # We should only get back one group since it is the only one that
+ # ends with 'The'
+ self.assertThat(len(groups), matchers.Equals(1))
+ self.assertEqual(group_list[5]['id'], groups[0]['id'])
+
+ self._delete_test_data('user', user_list)
+ self._delete_test_data('group', group_list)
+
+ def test_groups_for_user_exact_filtered(self):
+ """Test exact filters doesn't break groups_for_user listing."""
+ group_list, user_list = self._groups_for_user_data()
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'The Ministry', comparator='equals')
+ groups = self.identity_api.list_groups_for_user(
+ user_list[0]['id'], hints=hints)
+ # We should only get back 1 out of the 3 groups with name 'The
+ # Ministry' hence showing that both "filters" have been applied.
+ self.assertEqual(1, len(groups))
+ self.assertEqual(group_list[6]['id'], groups[0]['id'])
+ self._delete_test_data('user', user_list)
+ self._delete_test_data('group', group_list)
+
+ def _get_user_name_field_size(self):
+ """Return the size of the user name field for the backend.
+
+ Subclasses can override this method to indicate that the user name
+ field is limited in length. The user name is the field used in the test
+ that validates that a filter value works even if it's longer than a
+ field.
+
+ If the backend doesn't limit the value length then return None.
+
+ """
+ return None
+
+ def test_filter_value_wider_than_field(self):
+ # If a filter value is given that's larger than the field in the
+ # backend then no values are returned.
+
+ user_name_field_size = self._get_user_name_field_size()
+
+ if user_name_field_size is None:
+ # The backend doesn't limit the size of the user name, so pass this
+ # test.
+ return
+
+ # Create some users just to make sure it would return something if the
+ # filter was ignored.
+ self._create_test_data('user', 2)
+
+ hints = driver_hints.Hints()
+ value = 'A' * (user_name_field_size + 1)
+ hints.add_filter('name', value)
+ users = self.identity_api.list_users(hints=hints)
+ self.assertEqual([], users)
+
+ def _list_users_in_group_data(self):
+ number_of_users = 10
+ user_name_data = {
+ 1: 'Arthur Conan Doyle',
+ 3: 'Arthur Rimbaud',
+ 9: 'Arthur Schopenhauer',
+ }
+ user_list = self._create_test_data(
+ 'user', number_of_users,
+ domain_id=CONF.identity.default_domain_id,
+ name_dict=user_name_data)
+ group = self._create_one_entity(
+ 'group', CONF.identity.default_domain_id, 'Great Writers')
+ for i in range(7):
+ self.identity_api.add_user_to_group(user_list[i]['id'],
+ group['id'])
+
+ return user_list, group
+
+ def test_list_users_in_group_inexact_filtered(self):
+ user_list, group = self._list_users_in_group_data()
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'Arthur', comparator='contains')
+ users = self.identity_api.list_users_in_group(group['id'], hints=hints)
+ self.assertThat(len(users), matchers.Equals(2))
+ self.assertIn(user_list[1]['id'], [users[0]['id'], users[1]['id']])
+ self.assertIn(user_list[3]['id'], [users[0]['id'], users[1]['id']])
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'Arthur', comparator='startswith')
+ users = self.identity_api.list_users_in_group(group['id'], hints=hints)
+ self.assertThat(len(users), matchers.Equals(2))
+ self.assertIn(user_list[1]['id'], [users[0]['id'], users[1]['id']])
+ self.assertIn(user_list[3]['id'], [users[0]['id'], users[1]['id']])
+
+ hints = driver_hints.Hints()
+ hints.add_filter('name', 'Doyle', comparator='endswith')
+ users = self.identity_api.list_users_in_group(group['id'], hints=hints)
+ self.assertThat(len(users), matchers.Equals(1))
+ self.assertEqual(user_list[1]['id'], users[0]['id'])
+
+ self._delete_test_data('user', user_list)
+ self._delete_entity('group')(group['id'])
+
+ def test_list_users_in_group_exact_filtered(self):
+ hints = driver_hints.Hints()
+ user_list, group = self._list_users_in_group_data()
+ hints.add_filter('name', 'Arthur Rimbaud', comparator='equals')
+ users = self.identity_api.list_users_in_group(group['id'], hints=hints)
+ self.assertEqual(1, len(users))
+ self.assertEqual(user_list[3]['id'], users[0]['id'])
+ self._delete_test_data('user', user_list)
+ self._delete_entity('group')(group['id'])
+
+
+class LimitTests(filtering.FilterTests):
+ ENTITIES = ['user', 'group', 'project']
+
+ def setUp(self):
+ """Setup for Limit Test Cases."""
+ self.entity_lists = {}
+
+ for entity in self.ENTITIES:
+ # Create 20 entities
+ self.entity_lists[entity] = self._create_test_data(entity, 20)
+ self.addCleanup(self.clean_up_entities)
+
+ def clean_up_entities(self):
+ """Clean up entity test data from Limit Test Cases."""
+ for entity in self.ENTITIES:
+ self._delete_test_data(entity, self.entity_lists[entity])
+ del self.entity_lists
+
+ def _test_list_entity_filtered_and_limited(self, entity):
+ self.config_fixture.config(list_limit=10)
+ # Should get back just 10 entities
+ hints = driver_hints.Hints()
+ entities = self._list_entities(entity)(hints=hints)
+ self.assertEqual(hints.limit['limit'], len(entities))
+ self.assertTrue(hints.limit['truncated'])
+
+ # Override with driver specific limit
+ if entity == 'project':
+ self.config_fixture.config(group='resource', list_limit=5)
+ else:
+ self.config_fixture.config(group='identity', list_limit=5)
+
+ # Should get back just 5 users
+ hints = driver_hints.Hints()
+ entities = self._list_entities(entity)(hints=hints)
+ self.assertEqual(hints.limit['limit'], len(entities))
+
+ # Finally, let's pretend we want to get the full list of entities,
+ # even with the limits set, as part of some internal calculation.
+ # Calling the API without a hints list should achieve this, and
+ # return at least the 20 entries we created (there may be other
+ # entities lying around created by other tests/setup).
+ entities = self._list_entities(entity)()
+ self.assertTrue(len(entities) >= 20)
+ self._match_with_list(self.entity_lists[entity], entities)
+
+ def test_list_users_filtered_and_limited(self):
+ self._test_list_entity_filtered_and_limited('user')
+
+ def test_list_groups_filtered_and_limited(self):
+ self._test_list_entity_filtered_and_limited('group')
+
+ def test_list_projects_filtered_and_limited(self):
+ self._test_list_entity_filtered_and_limited('project')
diff --git a/keystone-moon/keystone/tests/unit/identity/test_controllers.py b/keystone-moon/keystone/tests/unit/identity/test_controllers.py
new file mode 100644
index 00000000..ed2fe3ff
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/identity/test_controllers.py
@@ -0,0 +1,65 @@
+# Copyright 2016 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+
+from keystone import exception
+from keystone.identity import controllers
+from keystone.tests import unit
+from keystone.tests.unit.ksfixtures import database
+
+
+CONF = cfg.CONF
+
+_ADMIN_CONTEXT = {'is_admin': True, 'query_string': {}}
+
+
+class UserTestCaseNoDefaultDomain(unit.TestCase):
+
+ def setUp(self):
+ super(UserTestCaseNoDefaultDomain, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+ self.user_controller = controllers.User()
+
+ def test_setup(self):
+ # Other tests in this class assume there's no default domain, so make
+ # sure the setUp worked as expected.
+ self.assertRaises(
+ exception.DomainNotFound,
+ self.resource_api.get_domain, CONF.identity.default_domain_id)
+
+ def test_get_users(self):
+ # When list_users is done and there's no default domain, the result is
+ # an empty list.
+ res = self.user_controller.get_users(_ADMIN_CONTEXT)
+ self.assertEqual([], res['users'])
+
+ def test_get_user_by_name(self):
+ # When get_user_by_name is done and there's no default domain, the
+ # result is 404 Not Found
+ user_name = uuid.uuid4().hex
+ self.assertRaises(
+ exception.UserNotFound,
+ self.user_controller.get_user_by_name, _ADMIN_CONTEXT, user_name)
+
+ def test_create_user(self):
+ # When a user is created using the v2 controller and there's no default
+ # domain, it doesn't fail with can't find domain (a default domain is
+ # created)
+ user = {'name': uuid.uuid4().hex}
+ self.user_controller.create_user(_ADMIN_CONTEXT, user)
+ # If the above doesn't fail then this is successful.
diff --git a/keystone-moon/keystone/tests/unit/identity/test_core.py b/keystone-moon/keystone/tests/unit/identity/test_core.py
index e9845401..39f3c701 100644
--- a/keystone-moon/keystone/tests/unit/identity/test_core.py
+++ b/keystone-moon/keystone/tests/unit/identity/test_core.py
@@ -138,7 +138,7 @@ class TestDatabaseDomainConfigs(unit.TestCase):
def test_loading_config_from_database(self):
self.config_fixture.config(domain_configurations_from_database=True,
group='identity')
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
# Override two config options for our domain
conf = {'ldap': {'url': uuid.uuid4().hex,
@@ -165,7 +165,7 @@ class TestDatabaseDomainConfigs(unit.TestCase):
# Now turn off using database domain configuration and check that the
# default config file values are now seen instead of the overrides.
CONF.set_override('domain_configurations_from_database', False,
- 'identity')
+ 'identity', enforce_type=True)
domain_config = identity.DomainConfigs()
domain_config.setup_domain_drivers(fake_standard_driver,
self.resource_api)
diff --git a/keystone-moon/keystone/tests/unit/identity_mapping.py b/keystone-moon/keystone/tests/unit/identity_mapping.py
index 7fb8063f..4ba4f0c2 100644
--- a/keystone-moon/keystone/tests/unit/identity_mapping.py
+++ b/keystone-moon/keystone/tests/unit/identity_mapping.py
@@ -17,7 +17,6 @@ from keystone.identity.mapping_backends import sql as mapping_sql
def list_id_mappings():
"""List all id_mappings for testing purposes."""
-
- a_session = sql.get_session()
- refs = a_session.query(mapping_sql.IDMapping).all()
- return [x.to_dict() for x in refs]
+ with sql.session_for_read() as session:
+ refs = session.query(mapping_sql.IDMapping).all()
+ return [x.to_dict() for x in refs]
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py b/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py
index 81b80298..4b914752 100644
--- a/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py
@@ -11,5 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from keystone.tests.unit.ksfixtures.auth_plugins import ConfigAuthPlugins # noqa
from keystone.tests.unit.ksfixtures.cache import Cache # noqa
from keystone.tests.unit.ksfixtures.key_repository import KeyRepository # noqa
+from keystone.tests.unit.ksfixtures.policy import Policy # noqa
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py b/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py
index ea1e6255..a23b804f 100644
--- a/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py
@@ -29,8 +29,7 @@ ADMIN = 'admin'
class AppServer(fixtures.Fixture):
- """A fixture for managing an application server instance.
- """
+ """A fixture for managing an application server instance."""
def __init__(self, config, name, cert=None, key=None, ca=None,
cert_required=False, host='127.0.0.1', port=0):
@@ -72,7 +71,8 @@ class AppServer(fixtures.Fixture):
def _update_config_opt(self):
"""Updates the config with the actual port used."""
opt_name = self._get_config_option_for_section_name()
- CONF.set_override(opt_name, self.port, group='eventlet_server')
+ CONF.set_override(opt_name, self.port, group='eventlet_server',
+ enforce_type=True)
def _get_config_option_for_section_name(self):
"""Maps Paster config section names to port option names."""
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py b/keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py
new file mode 100644
index 00000000..68ba6f3a
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py
@@ -0,0 +1,34 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import fixtures
+
+from keystone.common import config as common_cfg
+
+
+class ConfigAuthPlugins(fixtures.Fixture):
+ """A fixture for setting up and tearing down auth plugins."""
+
+ def __init__(self, config_fixture, methods, **method_classes):
+ super(ConfigAuthPlugins, self).__init__()
+ self.methods = methods
+ self.config_fixture = config_fixture
+ self.method_classes = method_classes
+
+ def setUp(self):
+ super(ConfigAuthPlugins, self).setUp()
+ if self.methods:
+ self.config_fixture.config(group='auth', methods=self.methods)
+ common_cfg.setup_authentication()
+ if self.method_classes:
+ self.config_fixture.config(group='auth', **self.method_classes)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/cache.py b/keystone-moon/keystone/tests/unit/ksfixtures/cache.py
index 74566f1e..e0833ae2 100644
--- a/keystone-moon/keystone/tests/unit/ksfixtures/cache.py
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/cache.py
@@ -13,11 +13,17 @@
import fixtures
+from keystone import catalog
from keystone.common import cache
+CACHE_REGIONS = (cache.CACHE_REGION, catalog.COMPUTED_CATALOG_REGION)
+
+
class Cache(fixtures.Fixture):
- """A fixture for setting up and tearing down the cache between test cases.
+ """A fixture for setting up the cache between test cases.
+
+ This will also tear down an existing cache if one is already configured.
"""
def setUp(self):
@@ -29,8 +35,9 @@ class Cache(fixtures.Fixture):
# NOTE(morganfainberg): The only way to reconfigure the CacheRegion
# object on each setUp() call is to remove the .backend property.
- if cache.REGION.is_configured:
- del cache.REGION.backend
+ for region in CACHE_REGIONS:
+ if region.is_configured:
+ del region.backend
- # ensure the cache region instance is setup
- cache.configure_cache_region(cache.REGION)
+ # ensure the cache region instance is setup
+ cache.configure_cache(region=region)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/database.py b/keystone-moon/keystone/tests/unit/ksfixtures/database.py
index 6f23a99d..52c35cee 100644
--- a/keystone-moon/keystone/tests/unit/ksfixtures/database.py
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/database.py
@@ -28,12 +28,13 @@ CONF = cfg.CONF
def run_once(f):
"""A decorator to ensure the decorated function is only executed once.
- The decorated function cannot expect any arguments.
+ The decorated function is assumed to have one parameter.
+
"""
@functools.wraps(f)
- def wrapper():
+ def wrapper(one):
if not wrapper.already_ran:
- f()
+ f(one)
wrapper.already_ran = True
wrapper.already_ran = False
return wrapper
@@ -51,7 +52,7 @@ def initialize_sql_session():
@run_once
-def _load_sqlalchemy_models():
+def _load_sqlalchemy_models(version_specifiers):
"""Find all modules containing SQLAlchemy models and import them.
This creates more consistent, deterministic test runs because tables
@@ -66,6 +67,24 @@ def _load_sqlalchemy_models():
as more models are imported. Importing all models at the start of
the test run avoids this problem.
+ version_specifiers is a dict that contains any specific driver versions
+ that have been requested. The dict is of the form:
+
+ {<module_name> : {'versioned_backend' : <name of backend requested>,
+ 'versionless_backend' : <name of default backend>}
+ }
+
+ For example:
+
+ {'keystone.assignment': {'versioned_backend' : 'V8_backends',
+ 'versionless_backend' : 'backends'},
+ 'keystone.identity': {'versioned_backend' : 'V9_backends',
+ 'versionless_backend' : 'backends'}
+ }
+
+ The version_specifiers will be used to load the correct driver. The
+ algorithm for this assumes that versioned drivers begin in 'V'.
+
"""
keystone_root = os.path.normpath(os.path.join(
os.path.dirname(__file__), '..', '..', '..'))
@@ -78,25 +97,59 @@ def _load_sqlalchemy_models():
# The root will be prefixed with an instance of os.sep, which will
# make the root after replacement '.<root>', the 'keystone' part
# of the module path is always added to the front
- module_name = ('keystone.%s.sql' %
+ module_root = ('keystone.%s' %
root.replace(os.sep, '.').lstrip('.'))
+ module_components = module_root.split('.')
+ module_without_backends = ''
+ for x in range(0, len(module_components) - 1):
+ module_without_backends += module_components[x] + '.'
+ module_without_backends = module_without_backends.rstrip('.')
+ this_backend = module_components[len(module_components) - 1]
+
+ # At this point module_without_backends might be something like
+ # 'keystone.assignment', while this_backend might be something
+ # 'V8_backends'.
+
+ if module_without_backends.startswith('keystone.contrib'):
+ # All the sql modules have now been moved into the core tree
+ # so no point in loading these again here (and, in fact, doing
+            # so might break trying to load a versioned driver).
+ continue
+
+ if module_without_backends in version_specifiers:
+ # OK, so there is a request for a specific version of this one.
+ # We therefore should skip any other versioned backend as well
+ # as the non-versioned one.
+ version = version_specifiers[module_without_backends]
+ if ((this_backend != version['versioned_backend'] and
+ this_backend.startswith('V')) or
+ this_backend == version['versionless_backend']):
+ continue
+ else:
+ # No versioned driver requested, so ignore any that are
+ # versioned
+ if this_backend.startswith('V'):
+ continue
+
+ module_name = module_root + '.sql'
__import__(module_name)
class Database(fixtures.Fixture):
- """A fixture for setting up and tearing down a database.
-
- """
+ """A fixture for setting up and tearing down a database."""
- def __init__(self):
+ def __init__(self, version_specifiers=None):
super(Database, self).__init__()
initialize_sql_session()
- _load_sqlalchemy_models()
+ if version_specifiers is None:
+ version_specifiers = {}
+ _load_sqlalchemy_models(version_specifiers)
def setUp(self):
super(Database, self).setUp()
- self.engine = sql.get_engine()
+ with sql.session_for_write() as session:
+ self.engine = session.get_bind()
self.addCleanup(sql.cleanup)
sql.ModelBase.metadata.create_all(bind=self.engine)
self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py b/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
index 918087ad..9977b206 100644
--- a/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py
@@ -112,73 +112,6 @@ class HackingCode(fixtures.Fixture):
(8, 8, 'K004'),
]}
- assert_no_translations_for_debug_logging = {
- 'code': """
- import logging
- import logging as stlib_logging
- from keystone.i18n import _
- from keystone.i18n import _ as oslo_i18n
- from oslo_log import log
- from oslo_log import log as oslo_logging
-
- # stdlib logging
- L0 = logging.getLogger()
- L0.debug(_('text'))
- class C:
- def __init__(self):
- L0.debug(oslo_i18n('text', {}))
-
- # stdlib logging w/ alias and specifying a logger
- class C:
- def __init__(self):
- self.L1 = logging.getLogger(__name__)
- def m(self):
- self.L1.debug(
- _('text'), {}
- )
-
- # oslo logging and specifying a logger
- L2 = logging.getLogger(__name__)
- L2.debug(oslo_i18n('text'))
-
- # oslo logging w/ alias
- class C:
- def __init__(self):
- self.L3 = oslo_logging.getLogger()
- self.L3.debug(_('text'))
-
- # translation on a separate line
- msg = _('text')
- L2.debug(msg)
-
- # this should not fail
- if True:
- msg = _('message %s') % X
- L2.error(msg)
- raise TypeError(msg)
- if True:
- msg = 'message'
- L2.debug(msg)
-
- # this should not fail
- if True:
- if True:
- msg = _('message')
- else:
- msg = _('message')
- L2.debug(msg)
- raise Exception(msg)
- """,
- 'expected_errors': [
- (10, 9, 'K005'),
- (13, 17, 'K005'),
- (21, 12, 'K005'),
- (26, 9, 'K005'),
- (32, 22, 'K005'),
- (36, 9, 'K005'),
- ]
- }
-
dict_constructor = {
'code': """
lower_res = {k.lower(): v for k, v in six.iteritems(res[1])}
@@ -219,12 +152,12 @@ class HackingLogging(fixtures.Fixture):
LOG.info(_('text'))
class C:
def __init__(self):
- LOG.warn(oslo_i18n('text', {}))
- LOG.warn(_LW('text', {}))
+ LOG.warning(oslo_i18n('text', {}))
+ LOG.warning(_LW('text', {}))
""",
'expected_errors': [
(3, 9, 'K006'),
- (6, 17, 'K006'),
+ (6, 20, 'K006'),
],
},
{
@@ -287,13 +220,13 @@ class HackingLogging(fixtures.Fixture):
LOG = logging.getLogger()
# ensure the correct helper is being used
- LOG.warn(_LI('this should cause an error'))
+ LOG.warning(_LI('this should cause an error'))
# debug should not allow any helpers either
LOG.debug(_LI('this should cause an error'))
""",
'expected_errors': [
- (4, 9, 'K006'),
+ (4, 12, 'K006'),
(7, 10, 'K005'),
],
},
@@ -302,7 +235,7 @@ class HackingLogging(fixtures.Fixture):
# this should not be an error
L = log.getLogger(__name__)
msg = _('text')
- L.warn(msg)
+ L.warning(msg)
raise Exception(msg)
""",
'expected_errors': [],
@@ -312,7 +245,7 @@ class HackingLogging(fixtures.Fixture):
L = log.getLogger(__name__)
def f():
msg = _('text')
- L2.warn(msg)
+ L2.warning(msg)
something = True # add an extra statement here
raise Exception(msg)
""",
@@ -323,11 +256,11 @@ class HackingLogging(fixtures.Fixture):
LOG = log.getLogger(__name__)
def func():
msg = _('text')
- LOG.warn(msg)
+ LOG.warning(msg)
raise Exception('some other message')
""",
'expected_errors': [
- (4, 13, 'K006'),
+ (4, 16, 'K006'),
],
},
{
@@ -337,7 +270,7 @@ class HackingLogging(fixtures.Fixture):
msg = _('text')
else:
msg = _('text')
- LOG.warn(msg)
+ LOG.warning(msg)
raise Exception(msg)
""",
'expected_errors': [
@@ -350,28 +283,28 @@ class HackingLogging(fixtures.Fixture):
msg = _('text')
else:
msg = _('text')
- LOG.warn(msg)
+ LOG.warning(msg)
""",
'expected_errors': [
- (6, 9, 'K006'),
+ (6, 12, 'K006'),
],
},
{
'code': """
LOG = log.getLogger(__name__)
msg = _LW('text')
- LOG.warn(msg)
+ LOG.warning(msg)
raise Exception(msg)
""",
'expected_errors': [
- (3, 9, 'K007'),
+ (3, 12, 'K007'),
],
},
{
'code': """
LOG = log.getLogger(__name__)
msg = _LW('text')
- LOG.warn(msg)
+ LOG.warning(msg)
msg = _('something else')
raise Exception(msg)
""",
@@ -381,18 +314,18 @@ class HackingLogging(fixtures.Fixture):
'code': """
LOG = log.getLogger(__name__)
msg = _LW('hello %s') % 'world'
- LOG.warn(msg)
+ LOG.warning(msg)
raise Exception(msg)
""",
'expected_errors': [
- (3, 9, 'K007'),
+ (3, 12, 'K007'),
],
},
{
'code': """
LOG = log.getLogger(__name__)
msg = _LW('hello %s') % 'world'
- LOG.warn(msg)
+ LOG.warning(msg)
""",
'expected_errors': [],
},
@@ -409,3 +342,76 @@ class HackingLogging(fixtures.Fixture):
'expected_errors': [],
},
]
+
+ assert_not_using_deprecated_warn = {
+ 'code': """
+ # Logger.warn has been deprecated in Python3 in favor of
+ # Logger.warning
+ LOG = log.getLogger(__name__)
+ LOG.warn(_LW('text'))
+ """,
+ 'expected_errors': [
+ (4, 9, 'K009'),
+ ],
+ }
+
+ assert_no_translations_for_debug_logging = {
+ 'code': """
+ # stdlib logging
+ L0 = logging.getLogger()
+ L0.debug(_('text'))
+ class C:
+ def __init__(self):
+ L0.debug(oslo_i18n('text', {}))
+
+ # stdlib logging w/ alias and specifying a logger
+ class C:
+ def __init__(self):
+ self.L1 = logging.getLogger(__name__)
+ def m(self):
+ self.L1.debug(
+ _('text'), {}
+ )
+
+ # oslo logging and specifying a logger
+ L2 = logging.getLogger(__name__)
+ L2.debug(oslo_i18n('text'))
+
+ # oslo logging w/ alias
+ class C:
+ def __init__(self):
+ self.L3 = oslo_logging.getLogger()
+ self.L3.debug(_('text'))
+
+ # translation on a separate line
+ msg = _('text')
+ L2.debug(msg)
+
+ # this should not fail
+ if True:
+ msg = _('message %s') % X
+ L2.error(msg)
+ raise TypeError(msg)
+ if True:
+ msg = 'message'
+ L2.debug(msg)
+
+ # this should not fail
+ if True:
+ if True:
+ msg = _('message')
+ else:
+ msg = _('message')
+ L2.debug(msg)
+ raise Exception(msg)
+ """,
+ 'expected_errors': [
+ (3, 9, 'K005'),
+ (6, 17, 'K005'),
+ (14, 12, 'K005'),
+ (19, 9, 'K005'),
+ (25, 22, 'K005'),
+ (29, 9, 'K005'),
+ ]
+ }
+
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py b/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py
index b2cbe067..6cd8cc0b 100644
--- a/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py
@@ -19,8 +19,7 @@ from keystone.tests.unit import fakeldap
class LDAPDatabase(fixtures.Fixture):
- """A fixture for setting up and tearing down an LDAP database.
- """
+ """A fixture for setting up and tearing down an LDAP database."""
def setUp(self):
super(LDAPDatabase, self).setUp()
diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/policy.py b/keystone-moon/keystone/tests/unit/ksfixtures/policy.py
new file mode 100644
index 00000000..b883f980
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/ksfixtures/policy.py
@@ -0,0 +1,33 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import fixtures
+from oslo_policy import opts
+
+from keystone.policy.backends import rules
+
+
+class Policy(fixtures.Fixture):
+ """A fixture for working with policy configuration."""
+
+ def __init__(self, policy_file, config_fixture):
+ self._policy_file = policy_file
+ self._config_fixture = config_fixture
+
+ def setUp(self):
+ super(Policy, self).setUp()
+ opts.set_defaults(self._config_fixture.conf)
+ self._config_fixture.config(group='oslo_policy',
+ policy_file=self._policy_file)
+ rules.init()
+ self.addCleanup(rules.reset)
diff --git a/keystone-moon/keystone/tests/unit/mapping_fixtures.py b/keystone-moon/keystone/tests/unit/mapping_fixtures.py
index 94b07133..9dc980aa 100644
--- a/keystone-moon/keystone/tests/unit/mapping_fixtures.py
+++ b/keystone-moon/keystone/tests/unit/mapping_fixtures.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@@ -463,6 +465,30 @@ MAPPING_TESTER_REGEX = {
]
}
+
+MAPPING_DIRECT_MAPPING_THROUGH_KEYWORD = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": "{0}"
+ },
+ {
+ "group": TESTER_GROUP_ID
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName",
+ "any_one_of": [
+ "bwilliams"
+ ]
+ }
+ ]
+ }
+ ]
+}
+
MAPPING_DEVELOPER_REGEX = {
"rules": [
{
@@ -760,7 +786,7 @@ MAPPING_GROUPS_BLACKLIST = {
]
}
-# Excercise all possibilities of user identitfication. Values are hardcoded on
+# Exercise all possibilities of user identification. Values are hardcoded on
# purpose.
MAPPING_USER_IDS = {
"rules": [
@@ -1036,6 +1062,78 @@ MAPPING_WITH_DOMAINID_ONLY = {
]
}
+MAPPING_GROUPS_IDS_WHITELIST = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}"
+ }
+ },
+ {
+ "group_ids": "{1}"
+ },
+ {
+ "group": {
+ "id": "{2}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "name"
+ },
+ {
+ "type": "group_ids",
+ "whitelist": [
+ "abc123", "ghi789", "321cba"
+ ]
+ },
+ {
+ "type": "group"
+ }
+ ]
+ }
+ ]
+}
+
+MAPPING_GROUPS_IDS_BLACKLIST = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}"
+ }
+ },
+ {
+ "group_ids": "{1}"
+ },
+ {
+ "group": {
+ "id": "{2}"
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "name"
+ },
+ {
+ "type": "group_ids",
+ "blacklist": [
+ "def456"
+ ]
+ },
+ {
+ "type": "group"
+ }
+ ]
+ }
+ ]
+}
+
# Mapping used by tokenless test cases, it maps the domain_name only.
MAPPING_WITH_DOMAINNAME_ONLY = {
'rules': [
@@ -1184,6 +1282,26 @@ MAPPING_GROUPS_WHITELIST_PASS_THROUGH = {
]
}
+MAPPING_BAD_LOCAL_SETUP = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0}",
+ "domain": {"id": "default"}
+ },
+ "whatisthis": "local"
+ }
+ ],
+ "remote": [
+ {
+ "type": "UserName"
+ }
+ ]
+ }
+ ]
+}
EMPLOYEE_ASSERTION = {
'Email': 'tim@example.com',
@@ -1310,3 +1428,59 @@ UNMATCHED_GROUP_ASSERTION = {
'REMOTE_USER': 'Any Momoose',
'REMOTE_USER_GROUPS': 'EXISTS;NO_EXISTS'
}
+
+GROUP_IDS_ASSERTION = {
+ 'name': 'opilotte',
+ 'group_ids': 'abc123;def456;ghi789',
+ 'group': 'klm012'
+}
+
+GROUP_IDS_ASSERTION_ONLY_ONE_GROUP = {
+ 'name': 'opilotte',
+ 'group_ids': '321cba',
+ 'group': '210mlk'
+}
+
+UNICODE_NAME_ASSERTION = {
+ 'PFX_Email': 'jon@example.com',
+ 'PFX_UserName': 'jonkare',
+    'PFX_FirstName': 'Jon Kåre',
+ 'PFX_LastName': 'Hellån',
+ 'PFX_orgPersonType': 'Admin;Chief'
+}
+
+MAPPING_UNICODE = {
+ "rules": [
+ {
+ "local": [
+ {
+ "user": {
+ "name": "{0} {1}",
+ "email": "{2}"
+ },
+ "group": {
+ "id": EMPLOYEE_GROUP_ID
+ }
+ }
+ ],
+ "remote": [
+ {
+ "type": "PFX_FirstName"
+ },
+ {
+ "type": "PFX_LastName"
+ },
+ {
+ "type": "PFX_Email"
+ },
+ {
+ "type": "PFX_orgPersonType",
+ "any_one_of": [
+ "Admin",
+ "Big Cheese"
+ ]
+ }
+ ]
+ },
+ ],
+}
diff --git a/keystone-moon/keystone/tests/unit/policy/__init__.py b/keystone-moon/keystone/tests/unit/policy/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/policy/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/policy/test_backends.py b/keystone-moon/keystone/tests/unit/policy/test_backends.py
new file mode 100644
index 00000000..7b672420
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/policy/test_backends.py
@@ -0,0 +1,86 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystone import exception
+from keystone.tests import unit
+
+
+class PolicyTests(object):
+ def test_create(self):
+ ref = unit.new_policy_ref()
+ res = self.policy_api.create_policy(ref['id'], ref)
+ self.assertDictEqual(ref, res)
+
+ def test_get(self):
+ ref = unit.new_policy_ref()
+ res = self.policy_api.create_policy(ref['id'], ref)
+
+ res = self.policy_api.get_policy(ref['id'])
+ self.assertDictEqual(ref, res)
+
+ def test_list(self):
+ ref = unit.new_policy_ref()
+ self.policy_api.create_policy(ref['id'], ref)
+
+ res = self.policy_api.list_policies()
+ res = [x for x in res if x['id'] == ref['id']][0]
+ self.assertDictEqual(ref, res)
+
+ def test_update(self):
+ ref = unit.new_policy_ref()
+ self.policy_api.create_policy(ref['id'], ref)
+ orig = ref
+
+ ref = unit.new_policy_ref()
+
+ # (cannot change policy ID)
+ self.assertRaises(exception.ValidationError,
+ self.policy_api.update_policy,
+ orig['id'],
+ ref)
+
+ ref['id'] = orig['id']
+ res = self.policy_api.update_policy(orig['id'], ref)
+ self.assertDictEqual(ref, res)
+
+ def test_delete(self):
+ ref = unit.new_policy_ref()
+ self.policy_api.create_policy(ref['id'], ref)
+
+ self.policy_api.delete_policy(ref['id'])
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.delete_policy,
+ ref['id'])
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.get_policy,
+ ref['id'])
+ res = self.policy_api.list_policies()
+ self.assertFalse(len([x for x in res if x['id'] == ref['id']]))
+
+ def test_get_policy_returns_not_found(self):
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.get_policy,
+ uuid.uuid4().hex)
+
+ def test_update_policy_returns_not_found(self):
+ ref = unit.new_policy_ref()
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.update_policy,
+ ref['id'],
+ ref)
+
+ def test_delete_policy_returns_not_found(self):
+ self.assertRaises(exception.PolicyNotFound,
+ self.policy_api.delete_policy,
+ uuid.uuid4().hex)
diff --git a/keystone-moon/keystone/tests/unit/resource/__init__.py b/keystone-moon/keystone/tests/unit/resource/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/resource/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/resource/backends/__init__.py b/keystone-moon/keystone/tests/unit/resource/backends/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/resource/backends/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/resource/backends/test_sql.py b/keystone-moon/keystone/tests/unit/resource/backends/test_sql.py
new file mode 100644
index 00000000..79ad3df2
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/resource/backends/test_sql.py
@@ -0,0 +1,24 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystone.resource.backends import sql
+from keystone.tests import unit
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit.resource import test_backends
+
+
+class TestSqlResourceDriver(unit.BaseTestCase,
+ test_backends.ResourceDriverTests):
+ def setUp(self):
+ super(TestSqlResourceDriver, self).setUp()
+ self.useFixture(database.Database())
+ self.driver = sql.Resource()
diff --git a/keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py b/keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py b/keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py
new file mode 100644
index 00000000..b4c5f262
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py
@@ -0,0 +1,53 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+from keystone.common import sql
+from keystone.resource.config_backends import sql as config_sql
+from keystone.tests import unit
+from keystone.tests.unit.backend import core_sql
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit.resource import test_core
+
+
+class SqlDomainConfigModels(core_sql.BaseBackendSqlModels):
+
+ def test_whitelisted_model(self):
+ cols = (('domain_id', sql.String, 64),
+ ('group', sql.String, 255),
+ ('option', sql.String, 255),
+ ('value', sql.JsonBlob, None))
+ self.assertExpectedSchema('whitelisted_config', cols)
+
+ def test_sensitive_model(self):
+ cols = (('domain_id', sql.String, 64),
+ ('group', sql.String, 255),
+ ('option', sql.String, 255),
+ ('value', sql.JsonBlob, None))
+ self.assertExpectedSchema('sensitive_config', cols)
+
+
+class SqlDomainConfigDriver(unit.BaseTestCase,
+ test_core.DomainConfigDriverTests):
+ def setUp(self):
+ super(SqlDomainConfigDriver, self).setUp()
+ self.useFixture(database.Database())
+ self.driver = config_sql.DomainConfig()
+
+
+class SqlDomainConfig(core_sql.BaseBackendSqlTests,
+ test_core.DomainConfigTests):
+ def setUp(self):
+ super(SqlDomainConfig, self).setUp()
+ # test_core.DomainConfigTests is effectively a mixin class, so make
+ # sure we call its setup
+ test_core.DomainConfigTests.setUp(self)
diff --git a/keystone-moon/keystone/tests/unit/resource/test_backends.py b/keystone-moon/keystone/tests/unit/resource/test_backends.py
new file mode 100644
index 00000000..eed4c6ba
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/resource/test_backends.py
@@ -0,0 +1,1669 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+import mock
+from oslo_config import cfg
+from six.moves import range
+from testtools import matchers
+
+from keystone.common import driver_hints
+from keystone import exception
+from keystone.tests import unit
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit import utils as test_utils
+
+
+CONF = cfg.CONF
+
+
+class ResourceTests(object):
+
+ domain_count = len(default_fixtures.DOMAINS)
+
+ def test_get_project(self):
+ tenant_ref = self.resource_api.get_project(self.tenant_bar['id'])
+ self.assertDictEqual(self.tenant_bar, tenant_ref)
+
+ def test_get_project_returns_not_found(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ uuid.uuid4().hex)
+
+ def test_get_project_by_name(self):
+ tenant_ref = self.resource_api.get_project_by_name(
+ self.tenant_bar['name'],
+ CONF.identity.default_domain_id)
+ self.assertDictEqual(self.tenant_bar, tenant_ref)
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_get_project_by_name_for_project_acting_as_a_domain(self):
+ """Tests get_project_by_name works when the domain_id is None."""
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id, is_domain=False)
+ project = self.resource_api.create_project(project['id'], project)
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project_by_name,
+ project['name'],
+ None)
+
+ # Test that querying with domain_id as None will find the project
+        # acting as a domain, even if its name is the same as the regular
+ # project above.
+ project2 = unit.new_project_ref(is_domain=True,
+ name=project['name'])
+ project2 = self.resource_api.create_project(project2['id'], project2)
+
+ project_ref = self.resource_api.get_project_by_name(
+ project2['name'], None)
+
+ self.assertEqual(project2, project_ref)
+
+ def test_get_project_by_name_returns_not_found(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project_by_name,
+ uuid.uuid4().hex,
+ CONF.identity.default_domain_id)
+
+ def test_create_duplicate_project_id_fails(self):
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ project_id = project['id']
+ self.resource_api.create_project(project_id, project)
+ project['name'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.resource_api.create_project,
+ project_id,
+ project)
+
+ def test_create_duplicate_project_name_fails(self):
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ project_id = project['id']
+ self.resource_api.create_project(project_id, project)
+ project['id'] = 'fake2'
+ self.assertRaises(exception.Conflict,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ def test_create_duplicate_project_name_in_different_domains(self):
+ new_domain = unit.new_domain_ref()
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+ project1 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ project2 = unit.new_project_ref(name=project1['name'],
+ domain_id=new_domain['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ self.resource_api.create_project(project2['id'], project2)
+
+ def test_move_project_between_domains(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+ project = unit.new_project_ref(domain_id=domain1['id'])
+ self.resource_api.create_project(project['id'], project)
+ project['domain_id'] = domain2['id']
+ # Update the project asserting that a deprecation warning is emitted
+ with mock.patch(
+ 'oslo_log.versionutils.report_deprecated_feature') as mock_dep:
+ self.resource_api.update_project(project['id'], project)
+ self.assertTrue(mock_dep.called)
+
+ updated_project_ref = self.resource_api.get_project(project['id'])
+ self.assertEqual(domain2['id'], updated_project_ref['domain_id'])
+
+ def test_move_project_between_domains_with_clashing_names_fails(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+ # First, create a project in domain1
+ project1 = unit.new_project_ref(domain_id=domain1['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ # Now create a project in domain2 with a potentially clashing
+ # name - which should work since we have domain separation
+ project2 = unit.new_project_ref(name=project1['name'],
+ domain_id=domain2['id'])
+ self.resource_api.create_project(project2['id'], project2)
+ # Now try and move project1 into the 2nd domain - which should
+ # fail since the names clash
+ project1['domain_id'] = domain2['id']
+ self.assertRaises(exception.Conflict,
+ self.resource_api.update_project,
+ project1['id'],
+ project1)
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_move_project_with_children_between_domains_fails(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+ project = unit.new_project_ref(domain_id=domain1['id'])
+ self.resource_api.create_project(project['id'], project)
+ child_project = unit.new_project_ref(domain_id=domain1['id'],
+ parent_id=project['id'])
+ self.resource_api.create_project(child_project['id'], child_project)
+ project['domain_id'] = domain2['id']
+
+ # Update is not allowed, since updating the whole subtree would be
+ # necessary
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ project['id'],
+ project)
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_move_project_not_root_between_domains_fails(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+ project = unit.new_project_ref(domain_id=domain1['id'])
+ self.resource_api.create_project(project['id'], project)
+ child_project = unit.new_project_ref(domain_id=domain1['id'],
+ parent_id=project['id'])
+ self.resource_api.create_project(child_project['id'], child_project)
+ child_project['domain_id'] = domain2['id']
+
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ child_project['id'],
+ child_project)
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_move_root_project_between_domains_succeeds(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+ root_project = unit.new_project_ref(domain_id=domain1['id'])
+ root_project = self.resource_api.create_project(root_project['id'],
+ root_project)
+
+ root_project['domain_id'] = domain2['id']
+ self.resource_api.update_project(root_project['id'], root_project)
+ project_from_db = self.resource_api.get_project(root_project['id'])
+
+ self.assertEqual(domain2['id'], project_from_db['domain_id'])
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_update_domain_id_project_is_domain_fails(self):
+ other_domain = unit.new_domain_ref()
+ self.resource_api.create_domain(other_domain['id'], other_domain)
+ project = unit.new_project_ref(is_domain=True)
+ self.resource_api.create_project(project['id'], project)
+ project['domain_id'] = other_domain['id']
+
+ # Update of domain_id of projects acting as domains is not allowed
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ project['id'],
+ project)
+
+ def test_rename_duplicate_project_name_fails(self):
+ project1 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ project2 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(project1['id'], project1)
+ self.resource_api.create_project(project2['id'], project2)
+ project2['name'] = project1['name']
+ self.assertRaises(exception.Error,
+ self.resource_api.update_project,
+ project2['id'],
+ project2)
+
+ def test_update_project_id_does_nothing(self):
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ project_id = project['id']
+ self.resource_api.create_project(project['id'], project)
+ project['id'] = 'fake2'
+ self.resource_api.update_project(project_id, project)
+ project_ref = self.resource_api.get_project(project_id)
+ self.assertEqual(project_id, project_ref['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ 'fake2')
+
+ def test_delete_domain_with_user_group_project_links(self):
+ # TODO(chungg):add test case once expected behaviour defined
+ pass
+
+ def test_update_project_returns_not_found(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.update_project,
+ uuid.uuid4().hex,
+ dict())
+
+ def test_delete_project_returns_not_found(self):
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.delete_project,
+ uuid.uuid4().hex)
+
+ def test_create_update_delete_unicode_project(self):
+ unicode_project_name = u'name \u540d\u5b57'
+ project = unit.new_project_ref(
+ name=unicode_project_name,
+ domain_id=CONF.identity.default_domain_id)
+ project = self.resource_api.create_project(project['id'], project)
+ self.resource_api.update_project(project['id'], project)
+ self.resource_api.delete_project(project['id'])
+
+ def test_create_project_with_no_enabled_field(self):
+ ref = unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
+ del ref['enabled']
+ self.resource_api.create_project(ref['id'], ref)
+
+ project = self.resource_api.get_project(ref['id'])
+ self.assertIs(project['enabled'], True)
+
+ def test_create_project_long_name_fails(self):
+ project = unit.new_project_ref(
+ name='a' * 65, domain_id=CONF.identity.default_domain_id)
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ def test_create_project_blank_name_fails(self):
+ project = unit.new_project_ref(
+ name='', domain_id=CONF.identity.default_domain_id)
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ def test_create_project_invalid_name_fails(self):
+ project = unit.new_project_ref(
+ name=None, domain_id=CONF.identity.default_domain_id)
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+ project = unit.new_project_ref(
+ name=123, domain_id=CONF.identity.default_domain_id)
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ def test_update_project_blank_name_fails(self):
+ project = unit.new_project_ref(
+ name='fake1', domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(project['id'], project)
+ project['name'] = ''
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ project['id'],
+ project)
+
+ def test_update_project_long_name_fails(self):
+ project = unit.new_project_ref(
+ name='fake1', domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(project['id'], project)
+ project['name'] = 'a' * 65
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ project['id'],
+ project)
+
+ def test_update_project_invalid_name_fails(self):
+ project = unit.new_project_ref(
+ name='fake1', domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(project['id'], project)
+ project['name'] = None
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ project['id'],
+ project)
+
+ project['name'] = 123
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ project['id'],
+ project)
+
+ def test_update_project_invalid_enabled_type_string(self):
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertTrue(project_ref['enabled'])
+
+ # Strings are not valid boolean values
+ project['enabled'] = "false"
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ project['id'],
+ project)
+
+ def test_create_project_invalid_enabled_type_string(self):
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id,
+ # invalid string value
+ enabled="true")
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ def test_create_project_invalid_domain_id(self):
+ project = unit.new_project_ref(domain_id=uuid.uuid4().hex)
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ def test_list_domains(self):
+ domain1 = unit.new_domain_ref()
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ self.resource_api.create_domain(domain2['id'], domain2)
+ domains = self.resource_api.list_domains()
+ self.assertEqual(3, len(domains))
+ domain_ids = []
+ for domain in domains:
+ domain_ids.append(domain.get('id'))
+ self.assertIn(CONF.identity.default_domain_id, domain_ids)
+ self.assertIn(domain1['id'], domain_ids)
+ self.assertIn(domain2['id'], domain_ids)
+
+ def test_list_projects(self):
+ project_refs = self.resource_api.list_projects()
+ project_count = len(default_fixtures.TENANTS) + self.domain_count
+ self.assertEqual(project_count, len(project_refs))
+ for project in default_fixtures.TENANTS:
+ self.assertIn(project, project_refs)
+
+ def test_list_projects_with_multiple_filters(self):
+ # Create a project
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ project = self.resource_api.create_project(project['id'], project)
+
+ # Build driver hints with the project's name and inexistent description
+ hints = driver_hints.Hints()
+ hints.add_filter('name', project['name'])
+ hints.add_filter('description', uuid.uuid4().hex)
+
+ # Retrieve projects based on hints and check an empty list is returned
+ projects = self.resource_api.list_projects(hints)
+ self.assertEqual([], projects)
+
+ # Build correct driver hints
+ hints = driver_hints.Hints()
+ hints.add_filter('name', project['name'])
+ hints.add_filter('description', project['description'])
+
+ # Retrieve projects based on hints
+ projects = self.resource_api.list_projects(hints)
+
+ # Check that the returned list contains only the first project
+ self.assertEqual(1, len(projects))
+ self.assertEqual(project, projects[0])
+
+ def test_list_projects_for_domain(self):
+ project_ids = ([x['id'] for x in
+ self.resource_api.list_projects_in_domain(
+ CONF.identity.default_domain_id)])
+ # Only the projects from the default fixtures are expected, since
+ # filtering by domain does not include any project that acts as a
+ # domain.
+ self.assertThat(
+ project_ids, matchers.HasLength(len(default_fixtures.TENANTS)))
+ self.assertIn(self.tenant_bar['id'], project_ids)
+ self.assertIn(self.tenant_baz['id'], project_ids)
+ self.assertIn(self.tenant_mtu['id'], project_ids)
+ self.assertIn(self.tenant_service['id'], project_ids)
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_list_projects_acting_as_domain(self):
+ initial_domains = self.resource_api.list_domains()
+
+ # Creating 5 projects that act as domains
+ new_projects_acting_as_domains = []
+ for i in range(5):
+ project = unit.new_project_ref(is_domain=True)
+ project = self.resource_api.create_project(project['id'], project)
+ new_projects_acting_as_domains.append(project)
+
+ # Creating a few regular project to ensure it doesn't mess with the
+ # ones that act as domains
+ self._create_projects_hierarchy(hierarchy_size=2)
+
+ projects = self.resource_api.list_projects_acting_as_domain()
+ expected_number_projects = (
+ len(initial_domains) + len(new_projects_acting_as_domains))
+ self.assertEqual(expected_number_projects, len(projects))
+ for project in new_projects_acting_as_domains:
+ self.assertIn(project, projects)
+ for domain in initial_domains:
+ self.assertIn(domain['id'], [p['id'] for p in projects])
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_list_projects_for_alternate_domain(self):
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ project1 = unit.new_project_ref(domain_id=domain1['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = unit.new_project_ref(domain_id=domain1['id'])
+ self.resource_api.create_project(project2['id'], project2)
+ project_ids = ([x['id'] for x in
+ self.resource_api.list_projects_in_domain(
+ domain1['id'])])
+ self.assertEqual(2, len(project_ids))
+ self.assertIn(project1['id'], project_ids)
+ self.assertIn(project2['id'], project_ids)
+
+ def _create_projects_hierarchy(self, hierarchy_size=2,
+ domain_id=None,
+ is_domain=False,
+ parent_project_id=None):
+ """Creates a project hierarchy with specified size.
+
+ :param hierarchy_size: the desired hierarchy size, default is 2 -
+ a project with one child.
+ :param domain_id: domain where the projects hierarchy will be created.
+ :param is_domain: if the hierarchy will have the is_domain flag active
+ or not.
+ :param parent_project_id: if the intention is to create a
+ sub-hierarchy, sets the sub-hierarchy root. Defaults to creating
+ a new hierarchy, i.e. a new root project.
+
+ :returns projects: a list of the projects in the created hierarchy.
+
+ """
+ if domain_id is None:
+ domain_id = CONF.identity.default_domain_id
+ if parent_project_id:
+ project = unit.new_project_ref(parent_id=parent_project_id,
+ domain_id=domain_id,
+ is_domain=is_domain)
+ else:
+ project = unit.new_project_ref(domain_id=domain_id,
+ is_domain=is_domain)
+ project_id = project['id']
+ project = self.resource_api.create_project(project_id, project)
+
+ projects = [project]
+ for i in range(1, hierarchy_size):
+ new_project = unit.new_project_ref(parent_id=project_id,
+ domain_id=domain_id)
+
+ self.resource_api.create_project(new_project['id'], new_project)
+ projects.append(new_project)
+ project_id = new_project['id']
+
+ return projects
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_create_domain_with_project_api(self):
+ project = unit.new_project_ref(is_domain=True)
+ ref = self.resource_api.create_project(project['id'], project)
+ self.assertTrue(ref['is_domain'])
+ self.resource_api.get_domain(ref['id'])
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_project_as_a_domain_uniqueness_constraints(self):
+ """Tests project uniqueness for those acting as domains.
+
+ If it is a project acting as a domain, we can't have two or more with
+ the same name.
+
+ """
+ # Create two projects acting as a domain
+ project = unit.new_project_ref(is_domain=True)
+ project = self.resource_api.create_project(project['id'], project)
+ project2 = unit.new_project_ref(is_domain=True)
+ project2 = self.resource_api.create_project(project2['id'], project2)
+
+ # All projects acting as domains have a null domain_id, so should not
+ # be able to create another with the same name but a different
+ # project ID.
+ new_project = project.copy()
+ new_project['id'] = uuid.uuid4().hex
+
+ self.assertRaises(exception.Conflict,
+ self.resource_api.create_project,
+ new_project['id'],
+ new_project)
+
+ # We also should not be able to update one to have a name clash
+ project2['name'] = project['name']
+ self.assertRaises(exception.Conflict,
+ self.resource_api.update_project,
+ project2['id'],
+ project2)
+
+ # But updating it to a unique name is OK
+ project2['name'] = uuid.uuid4().hex
+ self.resource_api.update_project(project2['id'], project2)
+
+ # Finally, it should be OK to create a project with same name as one of
+ # these acting as a domain, as long as it is a regular project
+ project3 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id, name=project2['name'])
+ self.resource_api.create_project(project3['id'], project3)
+ # In fact, it should be OK to create such a project in the domain which
+ # has the matching name.
+ # TODO(henry-nash): Once we fully support projects acting as a domain,
+ # add a test here to create a sub-project with a name that matches its
+ # project acting as a domain
+
+ @unit.skip_if_no_multiple_domains_support
+ @test_utils.wip('waiting for sub projects acting as domains support')
+ def test_is_domain_sub_project_has_parent_domain_id(self):
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id, is_domain=True)
+ self.resource_api.create_project(project['id'], project)
+
+ sub_project = unit.new_project_ref(domain_id=project['id'],
+ parent_id=project['id'],
+ is_domain=True)
+
+ ref = self.resource_api.create_project(sub_project['id'], sub_project)
+ self.assertTrue(ref['is_domain'])
+ self.assertEqual(project['id'], ref['parent_id'])
+ self.assertEqual(project['id'], ref['domain_id'])
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_delete_domain_with_project_api(self):
+ project = unit.new_project_ref(domain_id=None,
+ is_domain=True)
+ self.resource_api.create_project(project['id'], project)
+
+ # Check that a corresponding domain was created
+ self.resource_api.get_domain(project['id'])
+
+ # Try to delete the enabled project that acts as a domain
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.delete_project,
+ project['id'])
+
+ # Disable the project
+ project['enabled'] = False
+ self.resource_api.update_project(project['id'], project)
+
+ # Successfully delete the project
+ self.resource_api.delete_project(project['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project['id'])
+
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ project['id'])
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_create_subproject_acting_as_domain_fails(self):
+ root_project = unit.new_project_ref(is_domain=True)
+ self.resource_api.create_project(root_project['id'], root_project)
+
+ sub_project = unit.new_project_ref(is_domain=True,
+ parent_id=root_project['id'])
+
+ # Creation of sub projects acting as domains is not allowed yet
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ sub_project['id'], sub_project)
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_create_domain_under_regular_project_hierarchy_fails(self):
+ # Projects acting as domains can't have a regular project as parent
+ projects_hierarchy = self._create_projects_hierarchy()
+ parent = projects_hierarchy[1]
+ project = unit.new_project_ref(domain_id=parent['id'],
+ parent_id=parent['id'],
+ is_domain=True)
+
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project['id'], project)
+
+ @unit.skip_if_no_multiple_domains_support
+ @test_utils.wip('waiting for sub projects acting as domains support')
+ def test_create_project_under_domain_hierarchy(self):
+ projects_hierarchy = self._create_projects_hierarchy(is_domain=True)
+ parent = projects_hierarchy[1]
+ project = unit.new_project_ref(domain_id=parent['id'],
+ parent_id=parent['id'],
+ is_domain=False)
+
+ ref = self.resource_api.create_project(project['id'], project)
+ self.assertFalse(ref['is_domain'])
+ self.assertEqual(parent['id'], ref['parent_id'])
+ self.assertEqual(parent['id'], ref['domain_id'])
+
+ def test_create_project_without_is_domain_flag(self):
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ del project['is_domain']
+ ref = self.resource_api.create_project(project['id'], project)
+ # The is_domain flag should be False by default
+ self.assertFalse(ref['is_domain'])
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_create_project_passing_is_domain_flag_true(self):
+ project = unit.new_project_ref(is_domain=True)
+
+ ref = self.resource_api.create_project(project['id'], project)
+ self.assertTrue(ref['is_domain'])
+
+ def test_create_project_passing_is_domain_flag_false(self):
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id, is_domain=False)
+
+ ref = self.resource_api.create_project(project['id'], project)
+ self.assertIs(False, ref['is_domain'])
+
+ @test_utils.wip('waiting for support for parent_id to imply domain_id')
+ def test_create_project_with_parent_id_and_without_domain_id(self):
+ # First create a domain
+ project = unit.new_project_ref(is_domain=True)
+ self.resource_api.create_project(project['id'], project)
+ # Now create a child by just naming the parent_id
+ sub_project = unit.new_project_ref(parent_id=project['id'])
+ ref = self.resource_api.create_project(sub_project['id'], sub_project)
+
+ # The domain_id should be set to the parent domain_id
+ self.assertEqual(project['domain_id'], ref['domain_id'])
+
+ def test_create_project_with_domain_id_and_without_parent_id(self):
+ # First create a domain
+ project = unit.new_project_ref(is_domain=True)
+ self.resource_api.create_project(project['id'], project)
+ # Now create a child by just naming the domain_id
+ sub_project = unit.new_project_ref(domain_id=project['id'])
+ ref = self.resource_api.create_project(sub_project['id'], sub_project)
+
+ # The parent_id and domain_id should be set to the id of the project
+ # acting as a domain
+ self.assertEqual(project['id'], ref['parent_id'])
+ self.assertEqual(project['id'], ref['domain_id'])
+
+ def test_create_project_with_domain_id_mismatch_to_parent_domain(self):
+ # First create a domain
+ project = unit.new_project_ref(is_domain=True)
+ self.resource_api.create_project(project['id'], project)
+ # Now try to create a child with the above as its parent, but
+ # specifying a different domain.
+ sub_project = unit.new_project_ref(
+ parent_id=project['id'], domain_id=CONF.identity.default_domain_id)
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ sub_project['id'], sub_project)
+
+ def test_check_leaf_projects(self):
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+ leaf_project = projects_hierarchy[1]
+
+ self.assertFalse(self.resource_api.is_leaf_project(
+ root_project['id']))
+ self.assertTrue(self.resource_api.is_leaf_project(
+ leaf_project['id']))
+
+ # Delete leaf_project
+ self.resource_api.delete_project(leaf_project['id'])
+
+ # Now, root_project should be leaf
+ self.assertTrue(self.resource_api.is_leaf_project(
+ root_project['id']))
+
+ def test_list_projects_in_subtree(self):
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ project1 = projects_hierarchy[0]
+ project2 = projects_hierarchy[1]
+ project3 = projects_hierarchy[2]
+ project4 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id,
+ parent_id=project2['id'])
+ self.resource_api.create_project(project4['id'], project4)
+
+ subtree = self.resource_api.list_projects_in_subtree(project1['id'])
+ self.assertEqual(3, len(subtree))
+ self.assertIn(project2, subtree)
+ self.assertIn(project3, subtree)
+ self.assertIn(project4, subtree)
+
+ subtree = self.resource_api.list_projects_in_subtree(project2['id'])
+ self.assertEqual(2, len(subtree))
+ self.assertIn(project3, subtree)
+ self.assertIn(project4, subtree)
+
+ subtree = self.resource_api.list_projects_in_subtree(project3['id'])
+ self.assertEqual(0, len(subtree))
+
+ def test_list_projects_in_subtree_with_circular_reference(self):
+ project1 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ project1 = self.resource_api.create_project(project1['id'], project1)
+
+ project2 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id,
+ parent_id=project1['id'])
+ self.resource_api.create_project(project2['id'], project2)
+
+ project1['parent_id'] = project2['id'] # Adds cyclic reference
+
+ # NOTE(dstanek): The manager does not allow parent_id to be updated.
+ # Instead will directly use the driver to create the cyclic
+ # reference.
+ self.resource_api.driver.update_project(project1['id'], project1)
+
+ subtree = self.resource_api.list_projects_in_subtree(project1['id'])
+
+ # NOTE(dstanek): If a cyclic reference is detected the code bails
+ # and returns None instead of falling into the infinite
+ # recursion trap.
+ self.assertIsNone(subtree)
+
+ def test_list_projects_in_subtree_invalid_project_id(self):
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.list_projects_in_subtree,
+ None)
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.list_projects_in_subtree,
+ uuid.uuid4().hex)
+
+ def test_list_project_parents(self):
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ project1 = projects_hierarchy[0]
+ project2 = projects_hierarchy[1]
+ project3 = projects_hierarchy[2]
+ project4 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id,
+ parent_id=project2['id'])
+ self.resource_api.create_project(project4['id'], project4)
+
+ parents1 = self.resource_api.list_project_parents(project3['id'])
+ self.assertEqual(3, len(parents1))
+ self.assertIn(project1, parents1)
+ self.assertIn(project2, parents1)
+
+ parents2 = self.resource_api.list_project_parents(project4['id'])
+ self.assertEqual(parents1, parents2)
+
+ parents = self.resource_api.list_project_parents(project1['id'])
+ # It has the default domain as parent
+ self.assertEqual(1, len(parents))
+
+ def test_update_project_enabled_cascade(self):
+ """Test update_project_cascade
+
+ Ensures the enabled attribute is correctly updated across
+ a simple 3-level projects hierarchy.
+ """
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ parent = projects_hierarchy[0]
+
+ # Disable in parent project disables the whole subtree
+ parent['enabled'] = False
+ # Store the ref from backend in another variable so we don't bother
+ # to remove other attributes that were not originally provided and
+ # were set in the manager, like parent_id and domain_id.
+ parent_ref = self.resource_api.update_project(parent['id'],
+ parent,
+ cascade=True)
+
+ subtree = self.resource_api.list_projects_in_subtree(parent['id'])
+ self.assertEqual(2, len(subtree))
+ self.assertFalse(parent_ref['enabled'])
+ self.assertFalse(subtree[0]['enabled'])
+ self.assertFalse(subtree[1]['enabled'])
+
+ # Enable parent project enables the whole subtree
+ parent['enabled'] = True
+ parent_ref = self.resource_api.update_project(parent['id'],
+ parent,
+ cascade=True)
+
+ subtree = self.resource_api.list_projects_in_subtree(parent['id'])
+ self.assertEqual(2, len(subtree))
+ self.assertTrue(parent_ref['enabled'])
+ self.assertTrue(subtree[0]['enabled'])
+ self.assertTrue(subtree[1]['enabled'])
+
+ def test_cannot_enable_cascade_with_parent_disabled(self):
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ grandparent = projects_hierarchy[0]
+ parent = projects_hierarchy[1]
+
+ grandparent['enabled'] = False
+ self.resource_api.update_project(grandparent['id'],
+ grandparent,
+ cascade=True)
+ subtree = self.resource_api.list_projects_in_subtree(parent['id'])
+ self.assertFalse(subtree[0]['enabled'])
+
+ parent['enabled'] = True
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.update_project,
+ parent['id'],
+ parent,
+ cascade=True)
+
+ def test_update_cascade_only_accepts_enabled(self):
+ # Update cascade does not accept any other attribute but 'enabled'
+ new_project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(new_project['id'], new_project)
+
+ new_project['name'] = 'project1'
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.update_project,
+ new_project['id'],
+ new_project,
+ cascade=True)
+
+ def test_list_project_parents_invalid_project_id(self):
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.list_project_parents,
+ None)
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.list_project_parents,
+ uuid.uuid4().hex)
+
+ def test_create_project_doesnt_modify_passed_in_dict(self):
+ new_project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ original_project = new_project.copy()
+ self.resource_api.create_project(new_project['id'], new_project)
+ self.assertDictEqual(original_project, new_project)
+
    def test_update_project_enable(self):
        """Test toggling and defaulting of a project's 'enabled' flag.

        The sequence below alternates explicit enable/disable updates with
        updates that omit 'enabled' entirely, verifying that an omitted flag
        leaves the stored value untouched.
        """
        project = unit.new_project_ref(
            domain_id=CONF.identity.default_domain_id)
        self.resource_api.create_project(project['id'], project)
        project_ref = self.resource_api.get_project(project['id'])
        self.assertTrue(project_ref['enabled'])

        # Disabling is persisted and visible on re-read.
        project['enabled'] = False
        self.resource_api.update_project(project['id'], project)
        project_ref = self.resource_api.get_project(project['id'])
        self.assertEqual(project['enabled'], project_ref['enabled'])

        # If not present, enabled field should not be updated
        del project['enabled']
        self.resource_api.update_project(project['id'], project)
        project_ref = self.resource_api.get_project(project['id'])
        self.assertFalse(project_ref['enabled'])

        # Re-enabling is persisted.
        project['enabled'] = True
        self.resource_api.update_project(project['id'], project)
        project_ref = self.resource_api.get_project(project['id'])
        self.assertEqual(project['enabled'], project_ref['enabled'])

        # Omitting 'enabled' again must leave the project enabled.
        del project['enabled']
        self.resource_api.update_project(project['id'], project)
        project_ref = self.resource_api.get_project(project['id'])
        self.assertTrue(project_ref['enabled'])
+
+ def test_create_invalid_domain_fails(self):
+ new_group = unit.new_group_ref(domain_id="doesnotexist")
+ self.assertRaises(exception.DomainNotFound,
+ self.identity_api.create_group,
+ new_group)
+ new_user = unit.new_user_ref(domain_id="doesnotexist")
+ self.assertRaises(exception.DomainNotFound,
+ self.identity_api.create_user,
+ new_user)
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_project_crud(self):
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ project = unit.new_project_ref(domain_id=domain['id'])
+ self.resource_api.create_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictContainsSubset(project, project_ref)
+
+ project['name'] = uuid.uuid4().hex
+ self.resource_api.update_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictContainsSubset(project, project_ref)
+
+ self.resource_api.delete_project(project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project['id'])
+
+ def test_domain_delete_hierarchy(self):
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+
+ # Creating a root and a leaf project inside the domain
+ projects_hierarchy = self._create_projects_hierarchy(
+ domain_id=domain['id'])
+ root_project = projects_hierarchy[0]
+ leaf_project = projects_hierarchy[0]
+
+ # Disable the domain
+ domain['enabled'] = False
+ self.resource_api.update_domain(domain['id'], domain)
+
+ # Delete the domain
+ self.resource_api.delete_domain(domain['id'])
+
+ # Make sure the domain no longer exists
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+
+ # Make sure the root project no longer exists
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ root_project['id'])
+
+ # Make sure the leaf project no longer exists
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ leaf_project['id'])
+
    def test_delete_projects_from_ids(self):
        """Test the resource backend call delete_projects_from_ids.

        Tests the normal flow of the delete_projects_from_ids backend call,
        that ensures no project on the list exists after it is successfully
        called.
        """
        project1_ref = unit.new_project_ref(
            domain_id=CONF.identity.default_domain_id)
        project2_ref = unit.new_project_ref(
            domain_id=CONF.identity.default_domain_id)
        projects = (project1_ref, project2_ref)
        for project in projects:
            self.resource_api.create_project(project['id'], project)

        # Setting up the ID's list
        projects_ids = [p['id'] for p in projects]
        # NOTE: this exercises the driver directly, bypassing the manager.
        self.resource_api.driver.delete_projects_from_ids(projects_ids)

        # Ensuring projects no longer exist at backend level
        for project_id in projects_ids:
            self.assertRaises(exception.ProjectNotFound,
                              self.resource_api.driver.get_project,
                              project_id)

        # Passing an empty list is silently ignored
        self.resource_api.driver.delete_projects_from_ids([])
+
+ def test_delete_projects_from_ids_with_no_existing_project_id(self):
+ """Tests delete_projects_from_ids issues warning if not found.
+
+ Tests the resource backend call delete_projects_from_ids passing a
+ non existing ID in project_ids, which is logged and ignored by
+ the backend.
+ """
+ project_ref = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(project_ref['id'], project_ref)
+
+ # Setting up the ID's list
+ projects_ids = (project_ref['id'], uuid.uuid4().hex)
+ with mock.patch('keystone.resource.backends.sql.LOG') as mock_log:
+ self.resource_api.delete_projects_from_ids(projects_ids)
+ self.assertTrue(mock_log.warning.called)
+ # The existing project was deleted.
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.driver.get_project,
+ project_ref['id'])
+
+ # Even if we only have one project, and it does not exist, it returns
+ # no error.
+ self.resource_api.driver.delete_projects_from_ids([uuid.uuid4().hex])
+
+ def test_delete_project_cascade(self):
+ # create a hierarchy with 3 levels
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ root_project = projects_hierarchy[0]
+ project1 = projects_hierarchy[1]
+ project2 = projects_hierarchy[2]
+
+ # Disabling all projects before attempting to delete
+ for project in (project2, project1, root_project):
+ project['enabled'] = False
+ self.resource_api.update_project(project['id'], project)
+
+ self.resource_api.delete_project(root_project['id'], cascade=True)
+
+ for project in projects_hierarchy:
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project['id'])
+
+ def test_delete_large_project_cascade(self):
+ """Try delete a large project with cascade true.
+
+ Tree we will create::
+
+ +-p1-+
+ | |
+ p5 p2
+ | |
+ p6 +-p3-+
+ | |
+ p7 p4
+ """
+ # create a hierarchy with 4 levels
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=4)
+ p1 = projects_hierarchy[0]
+ # Add the left branch to the hierarchy (p5, p6)
+ self._create_projects_hierarchy(hierarchy_size=2,
+ parent_project_id=p1['id'])
+ # Add p7 to the hierarchy
+ p3_id = projects_hierarchy[2]['id']
+ self._create_projects_hierarchy(hierarchy_size=1,
+ parent_project_id=p3_id)
+ # Reverse the hierarchy to disable the leaf first
+ prjs_hierarchy = ([p1] + self.resource_api.list_projects_in_subtree(
+ p1['id']))[::-1]
+
+ # Disabling all projects before attempting to delete
+ for project in prjs_hierarchy:
+ project['enabled'] = False
+ self.resource_api.update_project(project['id'], project)
+
+ self.resource_api.delete_project(p1['id'], cascade=True)
+ for project in prjs_hierarchy:
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project['id'])
+
+ def test_cannot_delete_project_cascade_with_enabled_child(self):
+ # create a hierarchy with 3 levels
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ root_project = projects_hierarchy[0]
+ project1 = projects_hierarchy[1]
+ project2 = projects_hierarchy[2]
+
+ project2['enabled'] = False
+ self.resource_api.update_project(project2['id'], project2)
+
+ # Cannot cascade delete root_project, since project1 is enabled
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.delete_project,
+ root_project['id'],
+ cascade=True)
+
+ # Ensuring no project was deleted, not even project2
+ self.resource_api.get_project(root_project['id'])
+ self.resource_api.get_project(project1['id'])
+ self.resource_api.get_project(project2['id'])
+
+ def test_hierarchical_projects_crud(self):
+ # create a hierarchy with just a root project (which is a leaf as well)
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=1)
+ root_project1 = projects_hierarchy[0]
+
+ # create a hierarchy with one root project and one leaf project
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project2 = projects_hierarchy[0]
+ leaf_project = projects_hierarchy[1]
+
+ # update description from leaf_project
+ leaf_project['description'] = 'new description'
+ self.resource_api.update_project(leaf_project['id'], leaf_project)
+ proj_ref = self.resource_api.get_project(leaf_project['id'])
+ self.assertDictEqual(leaf_project, proj_ref)
+
+ # update the parent_id is not allowed
+ leaf_project['parent_id'] = root_project1['id']
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.update_project,
+ leaf_project['id'],
+ leaf_project)
+
+ # delete root_project1
+ self.resource_api.delete_project(root_project1['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ root_project1['id'])
+
+ # delete root_project2 is not allowed since it is not a leaf project
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.delete_project,
+ root_project2['id'])
+
+ def test_create_project_with_invalid_parent(self):
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id, parent_id='fake')
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_create_leaf_project_with_different_domain(self):
+ root_project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(root_project['id'], root_project)
+
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ leaf_project = unit.new_project_ref(domain_id=domain['id'],
+ parent_id=root_project['id'])
+
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ leaf_project['id'],
+ leaf_project)
+
+ def test_delete_hierarchical_leaf_project(self):
+ """Deleting leaf first, then its (now leaf) parent, succeeds."""
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+ leaf_project = projects_hierarchy[1]
+
+ self.resource_api.delete_project(leaf_project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ leaf_project['id'])
+
+ # Once the leaf is gone the former root is itself a leaf.
+ self.resource_api.delete_project(root_project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ root_project['id'])
+
+ def test_delete_hierarchical_not_leaf_project(self):
+ """Deleting a project that still has children is forbidden."""
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.delete_project,
+ root_project['id'])
+
+ def test_update_project_parent(self):
+ """Re-parenting a project via update_project is forbidden."""
+ projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3)
+ project1 = projects_hierarchy[0]
+ project2 = projects_hierarchy[1]
+ project3 = projects_hierarchy[2]
+
+ # project2 is the parent from project3
+ self.assertEqual(project3.get('parent_id'), project2['id'])
+
+ # try to update project3 parent to parent1
+ project3['parent_id'] = project1['id']
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.update_project,
+ project3['id'],
+ project3)
+
+ def test_create_project_under_disabled_one(self):
+ """Creating a child under a disabled parent raises ValidationError."""
+ project1 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id, enabled=False)
+ self.resource_api.create_project(project1['id'], project1)
+
+ project2 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id,
+ parent_id=project1['id'])
+
+ # It's not possible to create a project under a disabled one in the
+ # hierarchy
+ self.assertRaises(exception.ValidationError,
+ self.resource_api.create_project,
+ project2['id'],
+ project2)
+
+ def test_disable_hierarchical_leaf_project(self):
+ """Disabling a leaf project (no children) is allowed and persisted."""
+ projects_hierarchy = self._create_projects_hierarchy()
+ leaf_project = projects_hierarchy[1]
+
+ leaf_project['enabled'] = False
+ self.resource_api.update_project(leaf_project['id'], leaf_project)
+
+ project_ref = self.resource_api.get_project(leaf_project['id'])
+ self.assertEqual(leaf_project['enabled'], project_ref['enabled'])
+
+ def test_disable_hierarchical_not_leaf_project(self):
+ """Disabling a project that still has enabled children is forbidden."""
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+
+ root_project['enabled'] = False
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.update_project,
+ root_project['id'],
+ root_project)
+
+ def test_enable_project_with_disabled_parent(self):
+ """Re-enabling a project is forbidden while its parent is disabled."""
+ projects_hierarchy = self._create_projects_hierarchy()
+ root_project = projects_hierarchy[0]
+ leaf_project = projects_hierarchy[1]
+
+ # Disable leaf and root
+ leaf_project['enabled'] = False
+ self.resource_api.update_project(leaf_project['id'], leaf_project)
+ root_project['enabled'] = False
+ self.resource_api.update_project(root_project['id'], root_project)
+
+ # Try to enable the leaf project, it's not possible since it has
+ # a disabled parent
+ leaf_project['enabled'] = True
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.update_project,
+ leaf_project['id'],
+ leaf_project)
+
+ def _get_hierarchy_depth(self, project_id):
+ """Return the depth of *project_id* in its tree (parents count + 1)."""
+ return len(self.resource_api.list_project_parents(project_id)) + 1
+
+ def test_check_hierarchy_depth(self):
+ """Hierarchy may reach max_project_tree_depth + 1; one more is forbidden."""
+ # Should be allowed to have a hierarchy of the max depth specified
+ # in the config option plus one (to allow for the additional project
+ # acting as a domain after an upgrade)
+ projects_hierarchy = self._create_projects_hierarchy(
+ CONF.max_project_tree_depth)
+ leaf_project = projects_hierarchy[CONF.max_project_tree_depth - 1]
+
+ depth = self._get_hierarchy_depth(leaf_project['id'])
+ self.assertEqual(CONF.max_project_tree_depth + 1, depth)
+
+ # Creating another project in the hierarchy shouldn't be allowed
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id,
+ parent_id=leaf_project['id'])
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.create_project,
+ project['id'],
+ project)
+
+ def test_project_update_missing_attrs_with_a_value(self):
+ """An attribute absent at create time can be added via update."""
+ # Creating a project with no description attribute.
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ del project['description']
+ project = self.resource_api.create_project(project['id'], project)
+
+ # Add a description attribute.
+ project['description'] = uuid.uuid4().hex
+ self.resource_api.update_project(project['id'], project)
+
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project, project_ref)
+
+ def test_project_update_missing_attrs_with_a_falsey_value(self):
+ """A missing attribute can be set to a falsey value ('') via update."""
+ # Creating a project with no description attribute.
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ del project['description']
+ project = self.resource_api.create_project(project['id'], project)
+
+ # Add a description attribute.
+ project['description'] = ''
+ self.resource_api.update_project(project['id'], project)
+
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project, project_ref)
+
+ def test_domain_crud(self):
+ """Full create/get/update/delete cycle for a domain.
+
+ Also verifies an enabled domain cannot be deleted until disabled.
+ """
+ domain = unit.new_domain_ref()
+ domain_ref = self.resource_api.create_domain(domain['id'], domain)
+ self.assertDictEqual(domain, domain_ref)
+ domain_ref = self.resource_api.get_domain(domain['id'])
+ self.assertDictEqual(domain, domain_ref)
+
+ domain['name'] = uuid.uuid4().hex
+ domain_ref = self.resource_api.update_domain(domain['id'], domain)
+ self.assertDictEqual(domain, domain_ref)
+ domain_ref = self.resource_api.get_domain(domain['id'])
+ self.assertDictEqual(domain, domain_ref)
+
+ # Ensure an 'enabled' domain cannot be deleted
+ self.assertRaises(exception.ForbiddenNotSecurity,
+ self.resource_api.delete_domain,
+ domain_id=domain['id'])
+
+ # Disable the domain
+ domain['enabled'] = False
+ self.resource_api.update_domain(domain['id'], domain)
+
+ # Delete the domain
+ self.resource_api.delete_domain(domain['id'])
+
+ # Make sure the domain no longer exists
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_domain_name_case_sensitivity(self):
+ """Domain names differing only in case are distinct domains."""
+ # create a ref with a lowercase name
+ domain_name = 'test_domain'
+ ref = unit.new_domain_ref(name=domain_name)
+
+ lower_case_domain = self.resource_api.create_domain(ref['id'], ref)
+
+ # assign a new ID to the ref with the same name, but in uppercase
+ ref['id'] = uuid.uuid4().hex
+ ref['name'] = domain_name.upper()
+ upper_case_domain = self.resource_api.create_domain(ref['id'], ref)
+
+ # We can get each domain by name
+ lower_case_domain_ref = self.resource_api.get_domain_by_name(
+ domain_name)
+ self.assertDictEqual(lower_case_domain, lower_case_domain_ref)
+
+ upper_case_domain_ref = self.resource_api.get_domain_by_name(
+ domain_name.upper())
+ self.assertDictEqual(upper_case_domain, upper_case_domain_ref)
+
+ def test_project_attribute_update(self):
+ """Cycle 'description' through falsey, None, and truthy values.
+
+ Verifies update_project and get_project agree after every transition.
+ """
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(project['id'], project)
+
+ # pick a key known to be non-existent
+ key = 'description'
+
+ def assert_key_equals(value):
+ # Both the update's return value and a fresh get must show *value*.
+ project_ref = self.resource_api.update_project(
+ project['id'], project)
+ self.assertEqual(value, project_ref[key])
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertEqual(value, project_ref[key])
+
+ def assert_get_key_is(value):
+ # Identity check (assertIs) — used for the None transitions.
+ project_ref = self.resource_api.update_project(
+ project['id'], project)
+ self.assertIs(project_ref.get(key), value)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertIs(project_ref.get(key), value)
+
+ # add an attribute that doesn't exist, set it to a falsey value
+ value = ''
+ project[key] = value
+ assert_key_equals(value)
+
+ # set an attribute with a falsey value to null
+ value = None
+ project[key] = value
+ assert_get_key_is(value)
+
+ # do it again, in case updating from this situation is handled oddly
+ value = None
+ project[key] = value
+ assert_get_key_is(value)
+
+ # set a possibly-null value to a falsey value
+ value = ''
+ project[key] = value
+ assert_key_equals(value)
+
+ # set a falsey value to a truthy value
+ value = uuid.uuid4().hex
+ project[key] = value
+ assert_key_equals(value)
+
+ @unit.skip_if_cache_disabled('resource')
+ @unit.skip_if_no_multiple_domains_support
+ def test_domain_rename_invalidates_get_domain_by_name_cache(self):
+ """Renaming a domain evicts the old name from the by-name cache."""
+ domain = unit.new_domain_ref()
+ domain_id = domain['id']
+ domain_name = domain['name']
+ self.resource_api.create_domain(domain_id, domain)
+ domain_ref = self.resource_api.get_domain_by_name(domain_name)
+ domain_ref['name'] = uuid.uuid4().hex
+ self.resource_api.update_domain(domain_id, domain_ref)
+ # Lookup by the old name must now miss.
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain_by_name,
+ domain_name)
+
+ @unit.skip_if_cache_disabled('resource')
+ def test_cache_layer_domain_crud(self):
+ """Exercise the cache layer around domain CRUD.
+
+ Mutates state through ``self.resource_api.driver`` (bypassing the
+ manager and therefore its cache) and checks that stale values are
+ served until the cache is explicitly invalidated.
+ """
+ domain = unit.new_domain_ref()
+ domain_id = domain['id']
+ # Create Domain
+ self.resource_api.create_domain(domain_id, domain)
+ # NOTE(review): the domain is also retrievable via get_project with
+ # the same id — presumably domains are backed by project rows; confirm
+ # against the resource backend.
+ project_domain_ref = self.resource_api.get_project(domain_id)
+ domain_ref = self.resource_api.get_domain(domain_id)
+ updated_project_domain_ref = copy.deepcopy(project_domain_ref)
+ updated_project_domain_ref['name'] = uuid.uuid4().hex
+ updated_domain_ref = copy.deepcopy(domain_ref)
+ updated_domain_ref['name'] = updated_project_domain_ref['name']
+ # Update domain, bypassing resource api manager
+ self.resource_api.driver.update_project(domain_id,
+ updated_project_domain_ref)
+ # Verify get_domain still returns the domain
+ self.assertDictContainsSubset(
+ domain_ref, self.resource_api.get_domain(domain_id))
+ # Invalidate cache
+ self.resource_api.get_domain.invalidate(self.resource_api,
+ domain_id)
+ # Verify get_domain returns the updated domain
+ self.assertDictContainsSubset(
+ updated_domain_ref, self.resource_api.get_domain(domain_id))
+ # Update the domain back to original ref, using the assignment api
+ # manager
+ self.resource_api.update_domain(domain_id, domain_ref)
+ self.assertDictContainsSubset(
+ domain_ref, self.resource_api.get_domain(domain_id))
+ # Make sure domain is 'disabled', bypass resource api manager
+ project_domain_ref_disabled = project_domain_ref.copy()
+ project_domain_ref_disabled['enabled'] = False
+ self.resource_api.driver.update_project(domain_id,
+ project_domain_ref_disabled)
+ self.resource_api.driver.update_project(domain_id, {'enabled': False})
+ # Delete domain, bypassing resource api manager
+ self.resource_api.driver.delete_project(domain_id)
+ # Verify get_domain still returns the domain
+ self.assertDictContainsSubset(
+ domain_ref, self.resource_api.get_domain(domain_id))
+ # Invalidate cache
+ self.resource_api.get_domain.invalidate(self.resource_api,
+ domain_id)
+ # Verify get_domain now raises DomainNotFound
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain, domain_id)
+ # Recreate Domain
+ self.resource_api.create_domain(domain_id, domain)
+ self.resource_api.get_domain(domain_id)
+ # Make sure domain is 'disabled', bypass resource api manager
+ domain['enabled'] = False
+ self.resource_api.driver.update_project(domain_id, domain)
+ self.resource_api.driver.update_project(domain_id, {'enabled': False})
+ # Delete domain
+ self.resource_api.delete_domain(domain_id)
+ # verify DomainNotFound raised
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain_id)
+
+ @unit.skip_if_cache_disabled('resource')
+ @unit.skip_if_no_multiple_domains_support
+ def test_project_rename_invalidates_get_project_by_name_cache(self):
+ """Renaming a project evicts the old name from the by-name cache."""
+ domain = unit.new_domain_ref()
+ project = unit.new_project_ref(domain_id=domain['id'])
+ project_id = project['id']
+ project_name = project['name']
+ self.resource_api.create_domain(domain['id'], domain)
+ # Create a project
+ self.resource_api.create_project(project_id, project)
+ self.resource_api.get_project_by_name(project_name, domain['id'])
+ project['name'] = uuid.uuid4().hex
+ self.resource_api.update_project(project_id, project)
+ # Lookup by the old name must now miss.
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project_by_name,
+ project_name,
+ domain['id'])
+
+ @unit.skip_if_cache_disabled('resource')
+ @unit.skip_if_no_multiple_domains_support
+ def test_cache_layer_project_crud(self):
+ """Exercise the cache layer around project CRUD.
+
+ Mutates state through ``self.resource_api.driver`` (bypassing the
+ manager's cache) and checks stale reads until explicit invalidation.
+ """
+ domain = unit.new_domain_ref()
+ project = unit.new_project_ref(domain_id=domain['id'])
+ project_id = project['id']
+ self.resource_api.create_domain(domain['id'], domain)
+ # Create a project
+ self.resource_api.create_project(project_id, project)
+ self.resource_api.get_project(project_id)
+ updated_project = copy.deepcopy(project)
+ updated_project['name'] = uuid.uuid4().hex
+ # Update project, bypassing resource manager
+ self.resource_api.driver.update_project(project_id,
+ updated_project)
+ # Verify get_project still returns the original project_ref
+ self.assertDictContainsSubset(
+ project, self.resource_api.get_project(project_id))
+ # Invalidate cache
+ self.resource_api.get_project.invalidate(self.resource_api,
+ project_id)
+ # Verify get_project now returns the new project
+ self.assertDictContainsSubset(
+ updated_project,
+ self.resource_api.get_project(project_id))
+ # Update project using the resource_api manager back to original
+ self.resource_api.update_project(project['id'], project)
+ # Verify get_project returns the original project_ref
+ self.assertDictContainsSubset(
+ project, self.resource_api.get_project(project_id))
+ # Delete project bypassing resource
+ self.resource_api.driver.delete_project(project_id)
+ # Verify get_project still returns the project_ref
+ self.assertDictContainsSubset(
+ project, self.resource_api.get_project(project_id))
+ # Invalidate cache
+ self.resource_api.get_project.invalidate(self.resource_api,
+ project_id)
+ # Verify ProjectNotFound now raised
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project_id)
+ # recreate project
+ self.resource_api.create_project(project_id, project)
+ self.resource_api.get_project(project_id)
+ # delete project
+ self.resource_api.delete_project(project_id)
+ # Verify ProjectNotFound is raised
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project_id)
+
+ @unit.skip_if_no_multiple_domains_support
+ def test_get_default_domain_by_name(self):
+ """A domain literally named 'default' is retrievable by that name."""
+ domain_name = 'default'
+
+ domain = unit.new_domain_ref(name=domain_name)
+ self.resource_api.create_domain(domain['id'], domain)
+
+ domain_ref = self.resource_api.get_domain_by_name(domain_name)
+ self.assertEqual(domain, domain_ref)
+
+ def test_get_not_default_domain_by_name(self):
+ """Looking up a nonexistent domain name raises DomainNotFound."""
+ domain_name = 'foo'
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain_by_name,
+ domain_name)
+
+ def test_project_update_and_project_get_return_same_response(self):
+ """update_project's return value matches a subsequent get_project."""
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+
+ self.resource_api.create_project(project['id'], project)
+
+ # Partial update: only 'enabled' is supplied.
+ updated_project = {'enabled': False}
+ updated_project_ref = self.resource_api.update_project(
+ project['id'], updated_project)
+
+ # SQL backend adds 'extra' field
+ updated_project_ref.pop('extra', None)
+
+ self.assertIs(False, updated_project_ref['enabled'])
+
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(updated_project_ref, project_ref)
+
+
+class ResourceDriverTests(object):
+ """Tests for the resource driver.
+
+ Subclasses must set self.driver to the driver instance.
+
+ """
+
+ def test_create_project(self):
+ """A minimal project (name, id, domain_id) can be created."""
+ project_id = uuid.uuid4().hex
+ project = {
+ 'name': uuid.uuid4().hex,
+ 'id': project_id,
+ 'domain_id': uuid.uuid4().hex,
+ }
+ self.driver.create_project(project_id, project)
+
+ def test_create_project_all_defined_properties(self):
+ """A project specifying every optional property can be created."""
+ project_id = uuid.uuid4().hex
+ project = {
+ 'name': uuid.uuid4().hex,
+ 'id': project_id,
+ 'domain_id': uuid.uuid4().hex,
+ 'description': uuid.uuid4().hex,
+ 'enabled': True,
+ 'parent_id': uuid.uuid4().hex,
+ 'is_domain': True,
+ }
+ self.driver.create_project(project_id, project)
+
+ def test_create_project_null_domain(self):
+ """A project with domain_id=None is accepted by the driver."""
+ project_id = uuid.uuid4().hex
+ project = {
+ 'name': uuid.uuid4().hex,
+ 'id': project_id,
+ 'domain_id': None,
+ }
+ self.driver.create_project(project_id, project)
+
+ def test_create_project_same_name_same_domain_conflict(self):
+ """Duplicate (name, domain_id) pairs raise Conflict."""
+ name = uuid.uuid4().hex
+ domain_id = uuid.uuid4().hex
+
+ project_id = uuid.uuid4().hex
+ project = {
+ 'name': name,
+ 'id': project_id,
+ 'domain_id': domain_id,
+ }
+ self.driver.create_project(project_id, project)
+
+ # Second project: different id, same name and domain.
+ project_id = uuid.uuid4().hex
+ project = {
+ 'name': name,
+ 'id': project_id,
+ 'domain_id': domain_id,
+ }
+ self.assertRaises(exception.Conflict, self.driver.create_project,
+ project_id, project)
+
+ def test_create_project_same_id_conflict(self):
+ """Reusing an existing project id raises Conflict."""
+ project_id = uuid.uuid4().hex
+
+ project = {
+ 'name': uuid.uuid4().hex,
+ 'id': project_id,
+ 'domain_id': uuid.uuid4().hex,
+ }
+ self.driver.create_project(project_id, project)
+
+ # Second project: same id, different name and domain.
+ project = {
+ 'name': uuid.uuid4().hex,
+ 'id': project_id,
+ 'domain_id': uuid.uuid4().hex,
+ }
+ self.assertRaises(exception.Conflict, self.driver.create_project,
+ project_id, project)
diff --git a/keystone-moon/keystone/tests/unit/resource/test_controllers.py b/keystone-moon/keystone/tests/unit/resource/test_controllers.py
new file mode 100644
index 00000000..b8f247c8
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/resource/test_controllers.py
@@ -0,0 +1,57 @@
+# Copyright 2016 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+
+from keystone import exception
+from keystone.resource import controllers
+from keystone.tests import unit
+from keystone.tests.unit.ksfixtures import database
+
+
+CONF = cfg.CONF
+
+_ADMIN_CONTEXT = {'is_admin': True, 'query_string': {}}
+
+
+class TenantTestCaseNoDefaultDomain(unit.TestCase):
+ """v2 Tenant controller behavior when no default domain exists."""
+
+ def setUp(self):
+ super(TenantTestCaseNoDefaultDomain, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+ self.tenant_controller = controllers.Tenant()
+
+ def test_setup(self):
+ # Other tests in this class assume there's no default domain, so make
+ # sure the setUp worked as expected.
+ self.assertRaises(
+ exception.DomainNotFound,
+ self.resource_api.get_domain, CONF.identity.default_domain_id)
+
+ def test_get_all_projects(self):
+ # When get_all_projects is done and there's no default domain, the
+ # result is an empty list.
+ res = self.tenant_controller.get_all_projects(_ADMIN_CONTEXT)
+ self.assertEqual([], res['tenants'])
+
+ def test_create_project(self):
+ # When a project is created using the v2 controller and there's no
+ # default domain, it doesn't fail with can't find domain (a default
+ # domain is created)
+ tenant = {'name': uuid.uuid4().hex}
+ self.tenant_controller.create_project(_ADMIN_CONTEXT, tenant)
+ # If the above doesn't fail then this is successful.
diff --git a/keystone-moon/keystone/tests/unit/resource/test_core.py b/keystone-moon/keystone/tests/unit/resource/test_core.py
new file mode 100644
index 00000000..2eb87e4c
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/resource/test_core.py
@@ -0,0 +1,692 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import uuid
+
+import mock
+from testtools import matchers
+
+from oslo_config import cfg
+from oslotest import mockpatch
+
+from keystone import exception
+from keystone.tests import unit
+from keystone.tests.unit.ksfixtures import database
+
+
+CONF = cfg.CONF
+
+
+class TestResourceManagerNoFixtures(unit.SQLDriverOverrides, unit.TestCase):
+ """Resource manager tests run against a bare SQL backend (no fixtures)."""
+
+ def setUp(self):
+ super(TestResourceManagerNoFixtures, self).setUp()
+ self.useFixture(database.Database(self.sql_driver_version_overrides))
+ self.load_backends()
+
+ def test_ensure_default_domain_exists(self):
+ # When there's no default domain, ensure_default_domain_exists creates
+ # it.
+
+ # First make sure there's no default domain.
+ self.assertRaises(
+ exception.DomainNotFound,
+ self.resource_api.get_domain, CONF.identity.default_domain_id)
+
+ self.resource_api.ensure_default_domain_exists()
+ default_domain = self.resource_api.get_domain(
+ CONF.identity.default_domain_id)
+
+ expected_domain = {
+ 'id': CONF.identity.default_domain_id,
+ 'name': 'Default',
+ 'enabled': True,
+ 'description': 'Domain created automatically to support V2.0 '
+ 'operations.',
+ }
+ self.assertEqual(expected_domain, default_domain)
+
+ def test_ensure_default_domain_exists_already_exists(self):
+ # When there's already a default domain, ensure_default_domain_exists
+ # doesn't do anything.
+
+ name = uuid.uuid4().hex
+ description = uuid.uuid4().hex
+ domain_attrs = {
+ 'id': CONF.identity.default_domain_id,
+ 'name': name,
+ 'description': description,
+ }
+ self.resource_api.create_domain(CONF.identity.default_domain_id,
+ domain_attrs)
+
+ self.resource_api.ensure_default_domain_exists()
+
+ default_domain = self.resource_api.get_domain(
+ CONF.identity.default_domain_id)
+
+ expected_domain = {
+ 'id': CONF.identity.default_domain_id,
+ 'name': name,
+ 'enabled': True,
+ 'description': description,
+ }
+
+ self.assertEqual(expected_domain, default_domain)
+
+ def test_ensure_default_domain_exists_fails(self):
+ # When there's an unexpected exception creating domain it's passed on.
+
+ self.useFixture(mockpatch.PatchObject(
+ self.resource_api, 'create_domain',
+ side_effect=exception.UnexpectedError))
+
+ self.assertRaises(exception.UnexpectedError,
+ self.resource_api.ensure_default_domain_exists)
+
+ def test_update_project_name_conflict(self):
+ """Renaming a project to another project's name raises Conflict."""
+ name = uuid.uuid4().hex
+ description = uuid.uuid4().hex
+ domain_attrs = {
+ 'id': CONF.identity.default_domain_id,
+ 'name': name,
+ 'description': description,
+ }
+ domain = self.resource_api.create_domain(
+ CONF.identity.default_domain_id, domain_attrs)
+ project1 = unit.new_project_ref(domain_id=domain['id'],
+ name=uuid.uuid4().hex)
+ self.resource_api.create_project(project1['id'], project1)
+ project2 = unit.new_project_ref(domain_id=domain['id'],
+ name=uuid.uuid4().hex)
+ project = self.resource_api.create_project(project2['id'], project2)
+
+ self.assertRaises(exception.Conflict,
+ self.resource_api.update_project,
+ project['id'], {'name': project1['name']})
+
+
+class DomainConfigDriverTests(object):
+ """Tests for the domain-config driver.
+
+ Subclasses must set self.driver to the driver instance. Each scenario
+ is run twice, once for whitelisted and once for sensitive options.
+ """
+
+ def _domain_config_crud(self, sensitive):
+ """Create, read, update and delete a single config option."""
+ domain = uuid.uuid4().hex
+ group = uuid.uuid4().hex
+ option = uuid.uuid4().hex
+ value = uuid.uuid4().hex
+ self.driver.create_config_option(
+ domain, group, option, value, sensitive)
+ res = self.driver.get_config_option(
+ domain, group, option, sensitive)
+ config = {'group': group, 'option': option, 'value': value}
+ self.assertEqual(config, res)
+
+ value = uuid.uuid4().hex
+ self.driver.update_config_option(
+ domain, group, option, value, sensitive)
+ res = self.driver.get_config_option(
+ domain, group, option, sensitive)
+ config = {'group': group, 'option': option, 'value': value}
+ self.assertEqual(config, res)
+
+ self.driver.delete_config_options(
+ domain, group, option, sensitive)
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.driver.get_config_option,
+ domain, group, option, sensitive)
+ # ...and silent if we try to delete it again
+ self.driver.delete_config_options(
+ domain, group, option, sensitive)
+
+ def test_whitelisted_domain_config_crud(self):
+ self._domain_config_crud(sensitive=False)
+
+ def test_sensitive_domain_config_crud(self):
+ self._domain_config_crud(sensitive=True)
+
+ def _list_domain_config(self, sensitive):
+ """Test listing by combination of domain, group & option."""
+ config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ # Put config2 in the same group as config1
+ config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ # config3 has a non-string value to cover that case as well.
+ config3 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': 100}
+ domain = uuid.uuid4().hex
+
+ for config in [config1, config2, config3]:
+ self.driver.create_config_option(
+ domain, config['group'], config['option'],
+ config['value'], sensitive)
+
+ # Try listing all items from a domain
+ res = self.driver.list_config_options(
+ domain, sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(3))
+ for res_entry in res:
+ self.assertIn(res_entry, [config1, config2, config3])
+
+ # Try listing by domain and group
+ res = self.driver.list_config_options(
+ domain, group=config1['group'], sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(2))
+ for res_entry in res:
+ self.assertIn(res_entry, [config1, config2])
+
+ # Try listing by domain, group and option
+ res = self.driver.list_config_options(
+ domain, group=config2['group'],
+ option=config2['option'], sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(1))
+ self.assertEqual(config2, res[0])
+
+ def test_list_whitelisted_domain_config_crud(self):
+ self._list_domain_config(False)
+
+ def test_list_sensitive_domain_config_crud(self):
+ self._list_domain_config(True)
+
+ def _delete_domain_configs(self, sensitive):
+ """Test deleting by combination of domain, group & option."""
+ config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ # Put config2 and config3 in the same group as config1
+ config2 = {'group': config1['group'], 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ config3 = {'group': config1['group'], 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ config4 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ domain = uuid.uuid4().hex
+
+ for config in [config1, config2, config3, config4]:
+ self.driver.create_config_option(
+ domain, config['group'], config['option'],
+ config['value'], sensitive)
+
+ # Try deleting by domain, group and option
+ res = self.driver.delete_config_options(
+ domain, group=config2['group'],
+ option=config2['option'], sensitive=sensitive)
+ res = self.driver.list_config_options(
+ domain, sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(3))
+ for res_entry in res:
+ self.assertIn(res_entry, [config1, config3, config4])
+
+ # Try deleting by domain and group
+ res = self.driver.delete_config_options(
+ domain, group=config4['group'], sensitive=sensitive)
+ res = self.driver.list_config_options(
+ domain, sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(2))
+ for res_entry in res:
+ self.assertIn(res_entry, [config1, config3])
+
+ # Try deleting all items from a domain
+ res = self.driver.delete_config_options(
+ domain, sensitive=sensitive)
+ res = self.driver.list_config_options(
+ domain, sensitive=sensitive)
+ self.assertThat(res, matchers.HasLength(0))
+
+ def test_delete_whitelisted_domain_configs(self):
+ self._delete_domain_configs(False)
+
+ def test_delete_sensitive_domain_configs(self):
+ self._delete_domain_configs(True)
+
+ def _create_domain_config_twice(self, sensitive):
+ """Test conflict error thrown if create the same option twice."""
+ config = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex,
+ 'value': uuid.uuid4().hex}
+ domain = uuid.uuid4().hex
+
+ self.driver.create_config_option(
+ domain, config['group'], config['option'],
+ config['value'], sensitive=sensitive)
+ self.assertRaises(exception.Conflict,
+ self.driver.create_config_option,
+ domain, config['group'], config['option'],
+ config['value'], sensitive=sensitive)
+
+ def test_create_whitelisted_domain_config_twice(self):
+ self._create_domain_config_twice(False)
+
+ def test_create_sensitive_domain_config_twice(self):
+ self._create_domain_config_twice(True)
+
+
+class DomainConfigTests(object):
+
+ def setUp(self):
+ """Create a fresh domain for each test and register its cleanup."""
+ self.domain = unit.new_domain_ref()
+ self.resource_api.create_domain(self.domain['id'], self.domain)
+ self.addCleanup(self.clean_up_domain)
+
+ def clean_up_domain(self):
+ """Disable and delete the test domain (a domain must be disabled
+ before it can be deleted)."""
+ # NOTE(henry-nash): Deleting the domain will also delete any domain
+ # configs for this domain.
+ self.domain['enabled'] = False
+ self.resource_api.update_domain(self.domain['id'], self.domain)
+ self.resource_api.delete_domain(self.domain['id'])
+ del self.domain
+
+ def test_create_domain_config_including_sensitive_option(self):
+ """Sensitive options are stored separately from whitelisted ones."""
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ # password is sensitive, so check that the whitelisted portion and
+ # the sensitive piece have been stored in the appropriate locations.
+ res = self.domain_config_api.get_config(self.domain['id'])
+ config_whitelisted = copy.deepcopy(config)
+ config_whitelisted['ldap'].pop('password')
+ self.assertEqual(config_whitelisted, res)
+ res = self.domain_config_api.driver.get_config_option(
+ self.domain['id'], 'ldap', 'password', sensitive=True)
+ self.assertEqual(config['ldap']['password'], res['value'])
+
+ # Finally, use the non-public API to get back the whole config
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(config, res)
+
+ def test_get_partial_domain_config(self):
+ """get_config can be narrowed by group or by group+option."""
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ res = self.domain_config_api.get_config(self.domain['id'],
+ group='identity')
+ config_partial = copy.deepcopy(config)
+ config_partial.pop('ldap')
+ self.assertEqual(config_partial, res)
+ res = self.domain_config_api.get_config(
+ self.domain['id'], group='ldap', option='user_tree_dn')
+ self.assertEqual({'user_tree_dn': config['ldap']['user_tree_dn']}, res)
+ # ...but we should fail to get a sensitive option
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.get_config, self.domain['id'],
+ group='ldap', option='password')
+
+ def test_delete_partial_domain_config(self):
+ """delete_config can be narrowed by group or by group+option."""
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ # Delete a whole group.
+ self.domain_config_api.delete_config(
+ self.domain['id'], group='identity')
+ config_partial = copy.deepcopy(config)
+ config_partial.pop('identity')
+ config_partial['ldap'].pop('password')
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(config_partial, res)
+
+ # Delete a single option within a group.
+ self.domain_config_api.delete_config(
+ self.domain['id'], group='ldap', option='url')
+ config_partial = copy.deepcopy(config_partial)
+ config_partial['ldap'].pop('url')
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(config_partial, res)
+
+ def test_get_options_not_in_domain_config(self):
+ """Lookups for absent configs/groups/options raise DomainConfigNotFound."""
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.get_config, self.domain['id'])
+ config = {'ldap': {'url': uuid.uuid4().hex}}
+
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.get_config, self.domain['id'],
+ group='identity')
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.get_config, self.domain['id'],
+ group='ldap', option='user_tree_dn')
+
+ def test_get_sensitive_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual({}, res)
+ self.domain_config_api.create_config(self.domain['id'], config)
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(config, res)
+
+ def test_update_partial_domain_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ # Try updating a group
+ new_config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_filter': uuid.uuid4().hex}}
+ res = self.domain_config_api.update_config(
+ self.domain['id'], new_config, group='ldap')
+ expected_config = copy.deepcopy(config)
+ expected_config['ldap']['url'] = new_config['ldap']['url']
+ expected_config['ldap']['user_filter'] = (
+ new_config['ldap']['user_filter'])
+ expected_full_config = copy.deepcopy(expected_config)
+ expected_config['ldap'].pop('password')
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(expected_config, res)
+ # The sensitive option should still exist
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(expected_full_config, res)
+
+ # Try updating a single whitelisted option
+ self.domain_config_api.delete_config(self.domain['id'])
+ self.domain_config_api.create_config(self.domain['id'], config)
+ new_config = {'url': uuid.uuid4().hex}
+ res = self.domain_config_api.update_config(
+ self.domain['id'], new_config, group='ldap', option='url')
+
+ # Make sure whitelisted and full config is updated
+ expected_whitelisted_config = copy.deepcopy(config)
+ expected_whitelisted_config['ldap']['url'] = new_config['url']
+ expected_full_config = copy.deepcopy(expected_whitelisted_config)
+ expected_whitelisted_config['ldap'].pop('password')
+ self.assertEqual(expected_whitelisted_config, res)
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(expected_whitelisted_config, res)
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(expected_full_config, res)
+
+ # Try updating a single sensitive option
+ self.domain_config_api.delete_config(self.domain['id'])
+ self.domain_config_api.create_config(self.domain['id'], config)
+ new_config = {'password': uuid.uuid4().hex}
+ res = self.domain_config_api.update_config(
+ self.domain['id'], new_config, group='ldap', option='password')
+ # The whitelisted config should not have changed...
+ expected_whitelisted_config = copy.deepcopy(config)
+ expected_full_config = copy.deepcopy(config)
+ expected_whitelisted_config['ldap'].pop('password')
+ self.assertEqual(expected_whitelisted_config, res)
+ res = self.domain_config_api.get_config(self.domain['id'])
+ self.assertEqual(expected_whitelisted_config, res)
+ expected_full_config['ldap']['password'] = new_config['password']
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ # ...but the sensitive piece should have.
+ self.assertEqual(expected_full_config, res)
+
+ def test_update_invalid_partial_domain_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ # An extra group, when specifying one group should fail
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config, group='ldap')
+ # An extra option, when specifying one option should fail
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config['ldap'],
+ group='ldap', option='url')
+
+ # Now try the right number of groups/options, but just not
+ # ones that are in the config provided
+ config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config, group='identity')
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config['ldap'], group='ldap',
+ option='url')
+
+ # Now some valid groups/options, but just not ones that are in the
+ # existing config
+ config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ config_wrong_group = {'identity': {'driver': uuid.uuid4().hex}}
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.update_config,
+ self.domain['id'], config_wrong_group,
+ group='identity')
+ config_wrong_option = {'url': uuid.uuid4().hex}
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.update_config,
+ self.domain['id'], config_wrong_option,
+ group='ldap', option='url')
+
+ # And finally just some bad groups/options
+ bad_group = uuid.uuid4().hex
+ config = {bad_group: {'user': uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config, group=bad_group,
+ option='user')
+ bad_option = uuid.uuid4().hex
+ config = {'ldap': {bad_option: uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.update_config,
+ self.domain['id'], config, group='ldap',
+ option=bad_option)
+
+ def test_create_invalid_domain_config(self):
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], {})
+ config = {uuid.uuid4().hex: uuid.uuid4().hex}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], config)
+ config = {uuid.uuid4().hex: {uuid.uuid4().hex: uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], config)
+ config = {'ldap': {uuid.uuid4().hex: uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], config)
+ # Try an option that IS in the standard conf, but neither whitelisted
+ # or marked as sensitive
+ config = {'identity': {'user_tree_dn': uuid.uuid4().hex}}
+ self.assertRaises(exception.InvalidDomainConfig,
+ self.domain_config_api.create_config,
+ self.domain['id'], config)
+
+ def test_delete_invalid_partial_domain_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ # Try deleting a group not in the config
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.delete_config,
+ self.domain['id'], group='identity')
+ # Try deleting an option not in the config
+ self.assertRaises(exception.DomainConfigNotFound,
+ self.domain_config_api.delete_config,
+ self.domain['id'],
+ group='ldap', option='user_tree_dn')
+
+ def test_sensitive_substitution_in_domain_config(self):
+ # Create a config that contains a whitelisted option that requires
+ # substitution of a sensitive option.
+ config = {'ldap': {'url': 'my_url/%(password)s',
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+
+ # Read back the config with the internal method and ensure that the
+ # substitution has taken place.
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ expected_url = (
+ config['ldap']['url'] % {'password': config['ldap']['password']})
+ self.assertEqual(expected_url, res['ldap']['url'])
+
+ def test_invalid_sensitive_substitution_in_domain_config(self):
+ """Check that invalid substitutions raise warnings."""
+ mock_log = mock.Mock()
+
+ invalid_option_config = {
+ 'ldap': {'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+
+ for invalid_option in ['my_url/%(passssword)s',
+ 'my_url/%(password',
+ 'my_url/%(password)',
+ 'my_url/%(password)d']:
+ invalid_option_config['ldap']['url'] = invalid_option
+ self.domain_config_api.create_config(
+ self.domain['id'], invalid_option_config)
+
+ with mock.patch('keystone.resource.core.LOG', mock_log):
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ mock_log.warning.assert_any_call(mock.ANY)
+ self.assertEqual(
+ invalid_option_config['ldap']['url'], res['ldap']['url'])
+
+ def test_escaped_sequence_in_domain_config(self):
+ """Check that escaped '%(' doesn't get interpreted."""
+ mock_log = mock.Mock()
+
+ escaped_option_config = {
+ 'ldap': {'url': 'my_url/%%(password)s',
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+
+ self.domain_config_api.create_config(
+ self.domain['id'], escaped_option_config)
+
+ with mock.patch('keystone.resource.core.LOG', mock_log):
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertFalse(mock_log.warn.called)
+ # The escaping '%' should have been removed
+ self.assertEqual('my_url/%(password)s', res['ldap']['url'])
+
+ @unit.skip_if_cache_disabled('domain_config')
+ def test_cache_layer_get_sensitive_config(self):
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ # cache the result
+ res = self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id'])
+ self.assertEqual(config, res)
+
+ # delete, bypassing domain config manager api
+ self.domain_config_api.delete_config_options(self.domain['id'])
+ self.domain_config_api.delete_config_options(self.domain['id'],
+ sensitive=True)
+
+ self.assertDictEqual(
+ res, self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id']))
+ self.domain_config_api.get_config_with_sensitive_info.invalidate(
+ self.domain_config_api, self.domain['id'])
+ self.assertDictEqual(
+ {},
+ self.domain_config_api.get_config_with_sensitive_info(
+ self.domain['id']))
+
+ def test_delete_domain_deletes_configs(self):
+ """Test domain deletion clears the domain configs."""
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_tree_dn': uuid.uuid4().hex,
+ 'password': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(domain['id'], config)
+
+ # Now delete the domain
+ domain['enabled'] = False
+ self.resource_api.update_domain(domain['id'], domain)
+ self.resource_api.delete_domain(domain['id'])
+
+ # Check domain configs have also been deleted
+ self.assertRaises(
+ exception.DomainConfigNotFound,
+ self.domain_config_api.get_config,
+ domain['id'])
+
+ # The get_config_with_sensitive_info does not throw an exception if
+ # the config is empty, it just returns an empty dict
+ self.assertDictEqual(
+ {},
+ self.domain_config_api.get_config_with_sensitive_info(
+ domain['id']))
+
+ def test_config_registration(self):
+ type = uuid.uuid4().hex
+ self.domain_config_api.obtain_registration(
+ self.domain['id'], type)
+ self.domain_config_api.release_registration(
+ self.domain['id'], type=type)
+
+ # Make sure that once someone has it, nobody else can get it.
+ # This includes the domain who already has it.
+ self.domain_config_api.obtain_registration(
+ self.domain['id'], type)
+ self.assertFalse(
+ self.domain_config_api.obtain_registration(
+ self.domain['id'], type))
+
+ # Make sure we can read who does have it
+ self.assertEqual(
+ self.domain['id'],
+ self.domain_config_api.read_registration(type))
+
+ # Make sure releasing it is silent if the domain specified doesn't
+ # have the registration
+ domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.resource_api.create_domain(domain2['id'], domain2)
+ self.domain_config_api.release_registration(
+ domain2['id'], type=type)
+
+ # If nobody has the type registered, then trying to read it should
+ # raise ConfigRegistrationNotFound
+ self.domain_config_api.release_registration(
+ self.domain['id'], type=type)
+ self.assertRaises(exception.ConfigRegistrationNotFound,
+ self.domain_config_api.read_registration,
+ type)
+
+ # Finally check multiple registrations are cleared if you free the
+ # registration without specifying the type
+ type2 = uuid.uuid4().hex
+ self.domain_config_api.obtain_registration(
+ self.domain['id'], type)
+ self.domain_config_api.obtain_registration(
+ self.domain['id'], type2)
+ self.domain_config_api.release_registration(self.domain['id'])
+ self.assertRaises(exception.ConfigRegistrationNotFound,
+ self.domain_config_api.read_registration,
+ type)
+ self.assertRaises(exception.ConfigRegistrationNotFound,
+ self.domain_config_api.read_registration,
+ type2)
diff --git a/keystone-moon/keystone/tests/unit/rest.py b/keystone-moon/keystone/tests/unit/rest.py
index 35b47e2b..512c301d 100644
--- a/keystone-moon/keystone/tests/unit/rest.py
+++ b/keystone-moon/keystone/tests/unit/rest.py
@@ -61,7 +61,7 @@ class RestfulTestCase(unit.TestCase):
# Will need to reset the plug-ins
self.addCleanup(setattr, auth_controllers, 'AUTH_METHODS', {})
- self.useFixture(database.Database())
+ self.useFixture(database.Database(self.sql_driver_version_overrides))
self.load_backends()
self.load_fixtures(default_fixtures)
@@ -114,11 +114,10 @@ class RestfulTestCase(unit.TestCase):
example::
- self.assertResponseStatus(response, 204)
+ self.assertResponseStatus(response, http_client.NO_CONTENT)
"""
self.assertEqual(
- response.status_code,
- expected_status,
+ expected_status, response.status_code,
'Status code %s is not %s, as expected\n\n%s' %
(response.status_code, expected_status, response.body))
@@ -133,9 +132,9 @@ class RestfulTestCase(unit.TestCase):
Subclasses can override this function based on the expected response.
"""
- self.assertEqual(response.status_code, expected_status)
+ self.assertEqual(expected_status, response.status_code)
error = response.result['error']
- self.assertEqual(error['code'], response.status_code)
+ self.assertEqual(response.status_code, error['code'])
self.assertIsNotNone(error.get('title'))
def _to_content_type(self, body, headers, content_type=None):
@@ -146,7 +145,11 @@ class RestfulTestCase(unit.TestCase):
headers['Accept'] = 'application/json'
if body:
headers['Content-Type'] = 'application/json'
- return jsonutils.dumps(body)
+ # NOTE(davechen):dump the body to bytes since WSGI requires
+ # the body of the response to be `Bytestrings`.
+ # see pep-3333:
+ # https://www.python.org/dev/peps/pep-3333/#a-note-on-string-types
+ return jsonutils.dump_as_bytes(body)
def _from_content_type(self, response, content_type=None):
"""Attempt to decode JSON and XML automatically, if detected."""
@@ -213,6 +216,17 @@ class RestfulTestCase(unit.TestCase):
r = self.public_request(method='POST', path='/v2.0/tokens', body=body)
return self._get_token_id(r)
+ def get_admin_token(self):
+ return self._get_token({
+ 'auth': {
+ 'passwordCredentials': {
+ 'username': self.user_reqadmin['name'],
+ 'password': self.user_reqadmin['password']
+ },
+ 'tenantId': default_fixtures.SERVICE_TENANT_ID
+ }
+ })
+
def get_unscoped_token(self):
"""Convenience method so that we can test authenticated requests."""
return self._get_token({
diff --git a/keystone-moon/keystone/tests/unit/schema/__init__.py b/keystone-moon/keystone/tests/unit/schema/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/schema/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/schema/v2.py b/keystone-moon/keystone/tests/unit/schema/v2.py
new file mode 100644
index 00000000..ed260a00
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/schema/v2.py
@@ -0,0 +1,161 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+import copy
+
+from keystone.common import validation
+from keystone.common.validation import parameter_types
+from keystone.common.validation import validators
+
+
+_project_properties = {
+ 'id': parameter_types.id_string,
+ 'name': parameter_types.name,
+ 'enabled': parameter_types.boolean,
+ 'description': validation.nullable(parameter_types.description),
+}
+
+_token_properties = {
+ 'audit_ids': {
+ 'type': 'array',
+ 'items': {
+ 'type': 'string',
+ },
+ 'minItems': 1,
+ 'maxItems': 2,
+ },
+ 'id': {'type': 'string'},
+ 'expires': {'type': 'string'},
+ 'issued_at': {'type': 'string'},
+ 'tenant': {
+ 'type': 'object',
+ 'properties': _project_properties,
+ 'required': ['id', 'name', 'enabled'],
+ 'additionalProperties': False,
+ },
+}
+
+_role_properties = {
+ 'name': parameter_types.name,
+}
+
+_user_properties = {
+ 'id': parameter_types.id_string,
+ 'name': parameter_types.name,
+ 'username': parameter_types.name,
+ 'roles': {
+ 'type': 'array',
+ 'items': {
+ 'type': 'object',
+ 'properties': _role_properties,
+ 'required': ['name'],
+ 'additionalProperties': False,
+ },
+ },
+ 'roles_links': {
+ 'type': 'array',
+ 'maxItems': 0,
+ },
+}
+
+_metadata_properties = {
+ 'is_admin': {'type': 'integer'},
+ 'roles': {
+ 'type': 'array',
+ 'items': {'type': 'string'},
+ },
+}
+
+_endpoint_properties = {
+ 'id': {'type': 'string'},
+ 'adminURL': parameter_types.url,
+ 'internalURL': parameter_types.url,
+ 'publicURL': parameter_types.url,
+ 'region': {'type': 'string'},
+}
+
+_service_properties = {
+ 'type': {'type': 'string'},
+ 'name': parameter_types.name,
+ 'endpoints_links': {
+ 'type': 'array',
+ 'maxItems': 0,
+ },
+ 'endpoints': {
+ 'type': 'array',
+ 'minItems': 1,
+ 'items': {
+ 'type': 'object',
+ 'properties': _endpoint_properties,
+ 'required': ['id', 'publicURL'],
+ 'additionalProperties': False,
+ },
+ },
+}
+
+_base_access_properties = {
+ 'metadata': {
+ 'type': 'object',
+ 'properties': _metadata_properties,
+ 'required': ['is_admin', 'roles'],
+ 'additionalProperties': False,
+ },
+ 'serviceCatalog': {
+ 'type': 'array',
+ 'items': {
+ 'type': 'object',
+ 'properties': _service_properties,
+ 'required': ['name', 'type', 'endpoints_links', 'endpoints'],
+ 'additionalProperties': False,
+ },
+ },
+ 'token': {
+ 'type': 'object',
+ 'properties': _token_properties,
+ 'required': ['audit_ids', 'id', 'expires', 'issued_at'],
+ 'additionalProperties': False,
+ },
+ 'user': {
+ 'type': 'object',
+ 'properties': _user_properties,
+ 'required': ['id', 'name', 'username', 'roles', 'roles_links'],
+ 'additionalProperties': False,
+ },
+}
+
+_unscoped_access_properties = copy.deepcopy(_base_access_properties)
+unscoped_metadata = _unscoped_access_properties['metadata']
+unscoped_metadata['properties']['roles']['maxItems'] = 0
+_unscoped_access_properties['user']['properties']['roles']['maxItems'] = 0
+_unscoped_access_properties['serviceCatalog']['maxItems'] = 0
+
+_scoped_access_properties = copy.deepcopy(_base_access_properties)
+_scoped_access_properties['metadata']['properties']['roles']['minItems'] = 1
+_scoped_access_properties['serviceCatalog']['minItems'] = 1
+_scoped_access_properties['user']['properties']['roles']['minItems'] = 1
+
+base_token_schema = {
+ 'type': 'object',
+ 'required': ['metadata', 'user', 'serviceCatalog', 'token'],
+ 'additionalProperties': False,
+}
+
+unscoped_token_schema = copy.deepcopy(base_token_schema)
+unscoped_token_schema['properties'] = _unscoped_access_properties
+
+scoped_token_schema = copy.deepcopy(base_token_schema)
+scoped_token_schema['properties'] = _scoped_access_properties
+
+# Validator objects
+unscoped_validator = validators.SchemaValidator(unscoped_token_schema)
+scoped_validator = validators.SchemaValidator(scoped_token_schema)
diff --git a/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py b/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
index 24fc82dd..79065863 100644
--- a/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
+++ b/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py
@@ -15,24 +15,25 @@
import copy
import uuid
+import mock
+from oslo_log import versionutils
from six.moves import http_client
from testtools import matchers
+from keystone.contrib.endpoint_filter import routers
+from keystone.tests import unit
from keystone.tests.unit import test_v3
-class TestExtensionCase(test_v3.RestfulTestCase):
-
- EXTENSION_NAME = 'endpoint_filter'
- EXTENSION_TO_ADD = 'endpoint_filter_extension'
+class EndpointFilterTestCase(test_v3.RestfulTestCase):
def config_overrides(self):
- super(TestExtensionCase, self).config_overrides()
+ super(EndpointFilterTestCase, self).config_overrides()
self.config_fixture.config(
group='catalog', driver='endpoint_filter.sql')
def setUp(self):
- super(TestExtensionCase, self).setUp()
+ super(EndpointFilterTestCase, self).setUp()
self.default_request_url = (
'/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
@@ -40,7 +41,17 @@ class TestExtensionCase(test_v3.RestfulTestCase):
'endpoint_id': self.endpoint_id})
-class EndpointFilterCRUDTestCase(TestExtensionCase):
+class EndpointFilterDeprecateTestCase(test_v3.RestfulTestCase):
+
+ @mock.patch.object(versionutils, 'report_deprecated_feature')
+ def test_exception_happens(self, mock_deprecator):
+ routers.EndpointFilterExtension(mock.ANY)
+ mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("Remove endpoint_filter_extension from", args[1])
+
+
+class EndpointFilterCRUDTestCase(EndpointFilterTestCase):
def test_create_endpoint_project_association(self):
"""PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
@@ -48,8 +59,7 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
Valid endpoint and project id test case.
"""
- self.put(self.default_request_url,
- expected_status=204)
+ self.put(self.default_request_url)
def test_create_endpoint_project_association_with_invalid_project(self):
"""PUT OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
@@ -82,8 +92,7 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
"""
self.put(self.default_request_url,
- body={'project_id': self.default_domain_project_id},
- expected_status=204)
+ body={'project_id': self.default_domain_project_id})
def test_check_endpoint_project_association(self):
"""HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
@@ -91,13 +100,11 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
Valid project and endpoint id test case.
"""
- self.put(self.default_request_url,
- expected_status=204)
+ self.put(self.default_request_url)
self.head('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
- 'endpoint_id': self.endpoint_id},
- expected_status=204)
+ 'endpoint_id': self.endpoint_id})
def test_check_endpoint_project_association_with_invalid_project(self):
"""HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
@@ -169,8 +176,7 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
"""
r = self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
- {'endpoint_id': self.endpoint_id},
- expected_status=200)
+ {'endpoint_id': self.endpoint_id})
self.assertValidProjectListResponse(r, expected_length=0)
def test_list_projects_associated_with_invalid_endpoint(self):
@@ -193,8 +199,7 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
self.delete('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.default_domain_project_id,
- 'endpoint_id': self.endpoint_id},
- expected_status=204)
+ 'endpoint_id': self.endpoint_id})
def test_remove_endpoint_project_association_with_invalid_project(self):
"""DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}
@@ -226,35 +231,167 @@ class EndpointFilterCRUDTestCase(TestExtensionCase):
self.put(self.default_request_url)
association_url = ('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' %
{'endpoint_id': self.endpoint_id})
- r = self.get(association_url, expected_status=200)
+ r = self.get(association_url)
self.assertValidProjectListResponse(r, expected_length=1)
self.delete('/projects/%(project_id)s' % {
'project_id': self.default_domain_project_id})
- r = self.get(association_url, expected_status=200)
+ r = self.get(association_url)
self.assertValidProjectListResponse(r, expected_length=0)
def test_endpoint_project_association_cleanup_when_endpoint_deleted(self):
self.put(self.default_request_url)
association_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % {
'project_id': self.default_domain_project_id}
- r = self.get(association_url, expected_status=200)
+ r = self.get(association_url)
self.assertValidEndpointListResponse(r, expected_length=1)
self.delete('/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
- r = self.get(association_url, expected_status=200)
+ r = self.get(association_url)
self.assertValidEndpointListResponse(r, expected_length=0)
+ @unit.skip_if_cache_disabled('catalog')
+ def test_create_endpoint_project_association_invalidates_cache(self):
+ # NOTE(davechen): create another endpoint which will be added to
+ # default project, this should be done at first since
+ # `create_endpoint` will also invalidate cache.
+ endpoint_id2 = uuid.uuid4().hex
+ endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
+ region_id=self.region_id,
+ interface='public',
+ id=endpoint_id2)
+ self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
+
+ # create endpoint project association.
+ self.put(self.default_request_url)
-class EndpointFilterTokenRequestTestCase(TestExtensionCase):
+ # should get back only one endpoint that was just created.
+ user_id = uuid.uuid4().hex
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ # there is only one endpoints associated with the default project.
+ self.assertEqual(1, len(catalog[0]['endpoints']))
+ self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
+
+ # add the second endpoint to default project, bypassing
+ # catalog_api API manager.
+ self.catalog_api.driver.add_endpoint_to_project(
+ endpoint_id2,
+ self.default_domain_project_id)
+
+ # but, we can just get back one endpoint from the cache, since the
+ # catalog is pulled out from cache and its haven't been invalidated.
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ self.assertEqual(1, len(catalog[0]['endpoints']))
+
+ # remove the endpoint2 from the default project, and add it again via
+ # catalog_api API manager.
+ self.catalog_api.driver.remove_endpoint_from_project(
+ endpoint_id2,
+ self.default_domain_project_id)
+
+ # add second endpoint to default project, this can be done by calling
+ # the catalog_api API manager directly but call the REST API
+ # instead for consistency.
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.default_domain_project_id,
+ 'endpoint_id': endpoint_id2})
+
+ # should get back two endpoints since the cache has been
+ # invalidated when the second endpoint was added to default project.
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ self.assertEqual(2, len(catalog[0]['endpoints']))
+
+ ep_id_list = [catalog[0]['endpoints'][0]['id'],
+ catalog[0]['endpoints'][1]['id']]
+ self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
+
+ @unit.skip_if_cache_disabled('catalog')
+ def test_remove_endpoint_from_project_invalidates_cache(self):
+ endpoint_id2 = uuid.uuid4().hex
+ endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
+ region_id=self.region_id,
+ interface='public',
+ id=endpoint_id2)
+ self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
+ # create endpoint project association.
+ self.put(self.default_request_url)
+
+ # add second endpoint to default project.
+ self.put('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.default_domain_project_id,
+ 'endpoint_id': endpoint_id2})
+
+ # should get back only one endpoint that was just created.
+ user_id = uuid.uuid4().hex
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ # there are two endpoints associated with the default project.
+ ep_id_list = [catalog[0]['endpoints'][0]['id'],
+ catalog[0]['endpoints'][1]['id']]
+ self.assertEqual(2, len(catalog[0]['endpoints']))
+ self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
+
+ # remove the endpoint2 from the default project, bypassing
+ # catalog_api API manager.
+ self.catalog_api.driver.remove_endpoint_from_project(
+ endpoint_id2,
+ self.default_domain_project_id)
+
+ # but, we can just still get back two endpoints from the cache,
+ # since the catalog is pulled out from cache and its haven't
+ # been invalidated.
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ self.assertEqual(2, len(catalog[0]['endpoints']))
+
+ # add back the endpoint2 to the default project, and remove it by
+ # catalog_api API manage.
+ self.catalog_api.driver.add_endpoint_to_project(
+ endpoint_id2,
+ self.default_domain_project_id)
+
+ # remove the endpoint2 from the default project, this can be done
+ # by calling the catalog_api API manager directly but call
+ # the REST API instead for consistency.
+ self.delete('/OS-EP-FILTER/projects/%(project_id)s'
+ '/endpoints/%(endpoint_id)s' % {
+ 'project_id': self.default_domain_project_id,
+ 'endpoint_id': endpoint_id2})
+
+ # should only get back one endpoint since the cache has been
+ # invalidated after the endpoint project association was removed.
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ self.assertEqual(1, len(catalog[0]['endpoints']))
+ self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
+
+
+class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase):
def test_project_scoped_token_using_endpoint_filter(self):
"""Verify endpoints from project scoped token filtered."""
# create a project to work with
- ref = self.new_project_ref(domain_id=self.domain_id)
+ ref = unit.new_project_ref(domain_id=self.domain_id)
r = self.post('/projects', body={'project': ref})
project = self.assertValidProjectResponse(r, ref)
@@ -276,8 +413,7 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': project['id'],
- 'endpoint_id': self.endpoint_id},
- expected_status=204)
+ 'endpoint_id': self.endpoint_id})
# attempt to authenticate without requesting a project
auth_data = self.build_authentication_request(
@@ -289,7 +425,7 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
require_catalog=True,
endpoint_filter=True,
ep_filter_assoc=1)
- self.assertEqual(r.result['token']['project']['id'], project['id'])
+ self.assertEqual(project['id'], r.result['token']['project']['id'])
def test_default_scoped_token_using_endpoint_filter(self):
"""Verify endpoints from default scoped token filtered."""
@@ -297,8 +433,7 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id},
- expected_status=204)
+ 'endpoint_id': self.endpoint_id})
auth_data = self.build_authentication_request(
user_id=self.user['id'],
@@ -310,16 +445,24 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
require_catalog=True,
endpoint_filter=True,
ep_filter_assoc=1)
- self.assertEqual(r.result['token']['project']['id'],
- self.project['id'])
+ self.assertEqual(self.project['id'],
+ r.result['token']['project']['id'])
+
+ # Ensure name of the service exists
+ self.assertIn('name', r.result['token']['catalog'][0])
+
+ # region and region_id should be the same in endpoints
+ endpoint = r.result['token']['catalog'][0]['endpoints'][0]
+ self.assertIn('region', endpoint)
+ self.assertIn('region_id', endpoint)
+ self.assertEqual(endpoint['region'], endpoint['region_id'])
def test_scoped_token_with_no_catalog_using_endpoint_filter(self):
"""Verify endpoint filter does not affect no catalog."""
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id},
- expected_status=204)
+ 'endpoint_id': self.endpoint_id})
auth_data = self.build_authentication_request(
user_id=self.user['id'],
@@ -329,8 +472,8 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
self.assertValidProjectScopedTokenResponse(
r,
require_catalog=False)
- self.assertEqual(r.result['token']['project']['id'],
- self.project['id'])
+ self.assertEqual(self.project['id'],
+ r.result['token']['project']['id'])
def test_invalid_endpoint_project_association(self):
"""Verify an invalid endpoint-project association is handled."""
@@ -338,28 +481,26 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id},
- expected_status=204)
+ 'endpoint_id': self.endpoint_id})
# create a second temporary endpoint
- self.endpoint_id2 = uuid.uuid4().hex
- self.endpoint2 = self.new_endpoint_ref(service_id=self.service_id)
- self.endpoint2['id'] = self.endpoint_id2
- self.catalog_api.create_endpoint(
- self.endpoint_id2,
- self.endpoint2.copy())
+ endpoint_id2 = uuid.uuid4().hex
+ endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
+ region_id=self.region_id,
+ interface='public',
+ id=endpoint_id2)
+ self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy())
# add second endpoint to default project
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id2},
- expected_status=204)
+ 'endpoint_id': endpoint_id2})
# remove the temporary reference
# this will create inconsistency in the endpoint filter table
# which is fixed during the catalog creation for token request
- self.catalog_api.delete_endpoint(self.endpoint_id2)
+ self.catalog_api.delete_endpoint(endpoint_id2)
auth_data = self.build_authentication_request(
user_id=self.user['id'],
@@ -371,8 +512,8 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
require_catalog=True,
endpoint_filter=True,
ep_filter_assoc=1)
- self.assertEqual(r.result['token']['project']['id'],
- self.project['id'])
+ self.assertEqual(self.project['id'],
+ r.result['token']['project']['id'])
def test_disabled_endpoint(self):
"""Test that a disabled endpoint is handled."""
@@ -380,8 +521,7 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id},
- expected_status=204)
+ 'endpoint_id': self.endpoint_id})
# Add a disabled endpoint to the default project.
@@ -399,8 +539,7 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
- 'endpoint_id': disabled_endpoint_id},
- expected_status=204)
+ 'endpoint_id': disabled_endpoint_id})
# Authenticate to get token with catalog
auth_data = self.build_authentication_request(
@@ -416,7 +555,9 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
def test_multiple_endpoint_project_associations(self):
def _create_an_endpoint():
- endpoint_ref = self.new_endpoint_ref(service_id=self.service_id)
+ endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id)
r = self.post('/endpoints', body={'endpoint': endpoint_ref})
return r.result['endpoint']['id']
@@ -429,13 +570,11 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
- 'endpoint_id': endpoint_id1},
- expected_status=204)
+ 'endpoint_id': endpoint_id1})
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
- 'endpoint_id': endpoint_id2},
- expected_status=204)
+ 'endpoint_id': endpoint_id2})
# there should be only two endpoints in token catalog
auth_data = self.build_authentication_request(
@@ -454,8 +593,7 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
self.put('/OS-EP-FILTER/projects/%(project_id)s'
'/endpoints/%(endpoint_id)s' % {
'project_id': self.project['id'],
- 'endpoint_id': self.endpoint_id},
- expected_status=204)
+ 'endpoint_id': self.endpoint_id})
auth_data = self.build_authentication_request(
user_id=self.user['id'],
@@ -474,7 +612,7 @@ class EndpointFilterTokenRequestTestCase(TestExtensionCase):
auth_catalog.result['catalog'])
-class JsonHomeTests(TestExtensionCase, test_v3.JsonHomeTestMixin):
+class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin):
JSON_HOME_DATA = {
'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/'
'1.0/rel/endpoint_projects': {
@@ -545,7 +683,7 @@ class JsonHomeTests(TestExtensionCase, test_v3.JsonHomeTestMixin):
}
-class EndpointGroupCRUDTestCase(TestExtensionCase):
+class EndpointGroupCRUDTestCase(EndpointFilterTestCase):
DEFAULT_ENDPOINT_GROUP_BODY = {
'endpoint_group': {
@@ -638,7 +776,7 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % {
'endpoint_group_id': endpoint_group_id}
- self.head(url, expected_status=200)
+ self.head(url, expected_status=http_client.OK)
def test_check_invalid_endpoint_group(self):
"""HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}
@@ -832,7 +970,7 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
self.project_id)
url = self._get_project_endpoint_group_url(
endpoint_group_id, self.project_id)
- self.head(url, expected_status=200)
+ self.head(url, expected_status=http_client.OK)
def test_check_endpoint_group_to_project_with_invalid_project_id(self):
"""Test HEAD with an invalid endpoint group and project association."""
@@ -891,7 +1029,7 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
"""
# create a service
- service_ref = self.new_service_ref()
+ service_ref = unit.new_service_ref()
response = self.post(
'/services',
body={'service': service_ref})
@@ -899,10 +1037,10 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
service_id = response.result['service']['id']
# create an endpoint
- endpoint_ref = self.new_endpoint_ref(service_id=service_id)
- response = self.post(
- '/endpoints',
- body={'endpoint': endpoint_ref})
+ endpoint_ref = unit.new_endpoint_ref(service_id=service_id,
+ interface='public',
+ region_id=self.region_id)
+ response = self.post('/endpoints', body={'endpoint': endpoint_ref})
endpoint_id = response.result['endpoint']['id']
# create an endpoint group
@@ -929,7 +1067,7 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
"""
# create a temporary service
- service_ref = self.new_service_ref()
+ service_ref = unit.new_service_ref()
response = self.post('/services', body={'service': service_ref})
service_id2 = response.result['service']['id']
@@ -957,7 +1095,16 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
'project_id': self.default_domain_project_id}
r = self.get(endpoints_url)
endpoints = self.assertValidEndpointListResponse(r)
- self.assertEqual(len(endpoints), 2)
+ self.assertEqual(2, len(endpoints))
+
+ # Ensure catalog includes the endpoints from endpoint_group project
+ # association, this is needed when a project scoped token is issued
+ # and "endpoint_filter.sql" backend driver is in place.
+ user_id = uuid.uuid4().hex
+ catalog_list = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+ self.assertEqual(2, len(catalog_list))
# Now remove project endpoint group association
url = self._get_project_endpoint_group_url(
@@ -971,7 +1118,12 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
r = self.get(endpoints_url)
endpoints = self.assertValidEndpointListResponse(r)
- self.assertEqual(len(endpoints), 1)
+ self.assertEqual(1, len(endpoints))
+
+ catalog_list = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+ self.assertEqual(1, len(catalog_list))
def test_endpoint_group_project_cleanup_with_project(self):
# create endpoint group
@@ -979,7 +1131,7 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
# create new project and associate with endpoint_group
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
r = self.post('/projects', body={'project': project_ref})
project = self.assertValidProjectResponse(r, project_ref)
url = self._get_project_endpoint_group_url(endpoint_group_id,
@@ -1001,7 +1153,7 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
# create new project and associate with endpoint_group
- project_ref = self.new_project_ref(domain_id=self.domain_id)
+ project_ref = unit.new_project_ref(domain_id=self.domain_id)
r = self.post('/projects', body={'project': project_ref})
project = self.assertValidProjectResponse(r, project_ref)
url = self._get_project_endpoint_group_url(endpoint_group_id,
@@ -1049,6 +1201,153 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
self.get(project_endpoint_group_url,
expected_status=http_client.NOT_FOUND)
+ @unit.skip_if_cache_disabled('catalog')
+ def test_add_endpoint_group_to_project_invalidates_catalog_cache(self):
+ # create another endpoint with 'admin' interface which matches
+ # 'filters' definition in endpoint group, then there should be two
+ # endpoints returned when retrieving v3 catalog if cache works as
+ # expected.
+        # this should be done first since `create_endpoint` will also
+ # invalidate cache.
+ endpoint_id2 = uuid.uuid4().hex
+ endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
+ region_id=self.region_id,
+ interface='admin',
+ id=endpoint_id2)
+ self.catalog_api.create_endpoint(endpoint_id2, endpoint2)
+
+ # create a project and endpoint association.
+ self.put(self.default_request_url)
+
+ # there is only one endpoint associated with the default project.
+ user_id = uuid.uuid4().hex
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
+
+ # create an endpoint group.
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # add the endpoint group to default project, bypassing
+ # catalog_api API manager.
+ self.catalog_api.driver.add_endpoint_group_to_project(
+ endpoint_group_id,
+ self.default_domain_project_id)
+
+ # can get back only one endpoint from the cache, since the catalog
+ # is pulled out from cache.
+ invalid_catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ self.assertThat(invalid_catalog[0]['endpoints'],
+ matchers.HasLength(1))
+ self.assertEqual(catalog, invalid_catalog)
+
+ # remove the endpoint group from default project, and add it again via
+ # catalog_api API manager.
+ self.catalog_api.driver.remove_endpoint_group_from_project(
+ endpoint_group_id,
+ self.default_domain_project_id)
+
+ # add the endpoint group to default project.
+ self.catalog_api.add_endpoint_group_to_project(
+ endpoint_group_id,
+ self.default_domain_project_id)
+
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ # now, it will return 2 endpoints since the cache has been
+ # invalidated.
+ self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
+
+ ep_id_list = [catalog[0]['endpoints'][0]['id'],
+ catalog[0]['endpoints'][1]['id']]
+ self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
+
+ @unit.skip_if_cache_disabled('catalog')
+ def test_remove_endpoint_group_from_project_invalidates_cache(self):
+ # create another endpoint with 'admin' interface which matches
+ # 'filters' definition in endpoint group, then there should be two
+ # endpoints returned when retrieving v3 catalog. But only one
+ # endpoint will return after the endpoint group's deletion if cache
+ # works as expected.
+        # this should be done first since `create_endpoint` will also
+ # invalidate cache.
+ endpoint_id2 = uuid.uuid4().hex
+ endpoint2 = unit.new_endpoint_ref(service_id=self.service_id,
+ region_id=self.region_id,
+ interface='admin',
+ id=endpoint_id2)
+ self.catalog_api.create_endpoint(endpoint_id2, endpoint2)
+
+ # create project and endpoint association.
+ self.put(self.default_request_url)
+
+ # create an endpoint group.
+ endpoint_group_id = self._create_valid_endpoint_group(
+ self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY)
+
+ # add the endpoint group to default project.
+ self.catalog_api.add_endpoint_group_to_project(
+ endpoint_group_id,
+ self.default_domain_project_id)
+
+ # should get back two endpoints, one from endpoint project
+ # association, the other one is from endpoint_group project
+ # association.
+ user_id = uuid.uuid4().hex
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
+
+ ep_id_list = [catalog[0]['endpoints'][0]['id'],
+ catalog[0]['endpoints'][1]['id']]
+ self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list)
+
+ # remove endpoint_group project association, bypassing
+ # catalog_api API manager.
+ self.catalog_api.driver.remove_endpoint_group_from_project(
+ endpoint_group_id,
+ self.default_domain_project_id)
+
+ # still get back two endpoints, since the catalog is pulled out
+        # from cache and the cache hasn't been invalidated.
+ invalid_catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ self.assertThat(invalid_catalog[0]['endpoints'],
+ matchers.HasLength(2))
+ self.assertEqual(catalog, invalid_catalog)
+
+ # add back the endpoint_group project association and remove it from
+ # manager.
+ self.catalog_api.driver.add_endpoint_group_to_project(
+ endpoint_group_id,
+ self.default_domain_project_id)
+
+ self.catalog_api.remove_endpoint_group_from_project(
+ endpoint_group_id,
+ self.default_domain_project_id)
+
+ # should only get back one endpoint since the cache has been
+ # invalidated after the endpoint_group project association was
+ # removed.
+ catalog = self.catalog_api.get_v3_catalog(
+ user_id,
+ self.default_domain_project_id)
+
+ self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
+ self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id'])
+
def _create_valid_endpoint_group(self, url, body):
r = self.post(url, body=body)
return r.result['endpoint_group']['id']
@@ -1072,13 +1371,15 @@ class EndpointGroupCRUDTestCase(TestExtensionCase):
"""Creates an endpoint associated with service and project."""
if not service_id:
# create a new service
- service_ref = self.new_service_ref()
+ service_ref = unit.new_service_ref()
response = self.post(
'/services', body={'service': service_ref})
service_id = response.result['service']['id']
# create endpoint
- endpoint_ref = self.new_endpoint_ref(service_id=service_id)
+ endpoint_ref = unit.new_endpoint_ref(service_id=service_id,
+ interface='public',
+ region_id=self.region_id)
response = self.post('/endpoints', body={'endpoint': endpoint_ref})
endpoint = response.result['endpoint']
diff --git a/keystone-moon/keystone/tests/unit/test_auth.py b/keystone-moon/keystone/tests/unit/test_auth.py
index 6dd52c8a..6f44b316 100644
--- a/keystone-moon/keystone/tests/unit/test_auth.py
+++ b/keystone-moon/keystone/tests/unit/test_auth.py
@@ -14,6 +14,8 @@
import copy
import datetime
+import random
+import string
import uuid
import mock
@@ -26,11 +28,12 @@ from testtools import matchers
from keystone import assignment
from keystone import auth
from keystone.common import authorization
-from keystone import config
+from keystone.common import config
from keystone import exception
from keystone.models import token_model
from keystone.tests import unit
from keystone.tests.unit import default_fixtures
+from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import database
from keystone import token
from keystone.token import provider
@@ -39,9 +42,10 @@ from keystone import trust
CONF = cfg.CONF
TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
-DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
-HOST_URL = 'http://keystone:5001'
+HOST = ''.join(random.choice(string.ascii_lowercase) for x in range(
+ random.randint(5, 15)))
+HOST_URL = 'http://%s' % (HOST)
def _build_user_auth(token=None, user_id=None, username=None,
@@ -127,9 +131,7 @@ class AuthBadRequests(AuthTest):
context={}, auth={})
def test_empty_remote_user(self):
- """Verify that _authenticate_external() raises exception if
- REMOTE_USER is set as the empty string.
- """
+ """Verify exception is raised when REMOTE_USER is an empty string."""
context = {'environment': {'REMOTE_USER': ''}}
self.assertRaises(
token.controllers.ExternalAuthNotApplicable,
@@ -223,6 +225,36 @@ class AuthBadRequests(AuthTest):
self.controller.authenticate,
{}, body_dict)
+ def test_authenticate_fails_if_project_unsafe(self):
+ """Verify authenticate to a project with unsafe name fails."""
+ # Start with url name restrictions off, so we can create the unsafe
+ # named project
+ self.config_fixture.config(group='resource',
+ project_name_url_safe='off')
+ unsafe_name = 'i am not / safe'
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id, name=unsafe_name)
+ self.resource_api.create_project(project['id'], project)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_foo['id'], project['id'], self.role_member['id'])
+ no_context = {}
+
+ body_dict = _build_user_auth(
+ username=self.user_foo['name'],
+ password=self.user_foo['password'],
+ tenant_name=project['name'])
+
+        # Since name url restriction is off, we should be able to authenticate
+ self.controller.authenticate(no_context, body_dict)
+
+ # Set the name url restriction to strict and we should fail to
+ # authenticate
+ self.config_fixture.config(group='resource',
+ project_name_url_safe='strict')
+ self.assertRaises(exception.Unauthorized,
+ self.controller.authenticate,
+ no_context, body_dict)
+
class AuthWithToken(AuthTest):
def test_unscoped_token(self):
@@ -286,7 +318,7 @@ class AuthWithToken(AuthTest):
def test_auth_scoped_token_bad_project_with_debug(self):
"""Authenticating with an invalid project fails."""
- # Bug 1379952 reports poor user feedback, even in debug mode,
+ # Bug 1379952 reports poor user feedback, even in insecure_debug mode,
# when the user accidentally passes a project name as an ID.
# This test intentionally does exactly that.
body_dict = _build_user_auth(
@@ -294,8 +326,8 @@ class AuthWithToken(AuthTest):
password=self.user_foo['password'],
tenant_id=self.tenant_bar['name'])
- # with debug enabled, this produces a friendly exception.
- self.config_fixture.config(debug=True)
+ # with insecure_debug enabled, this produces a friendly exception.
+ self.config_fixture.config(debug=True, insecure_debug=True)
e = self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
@@ -308,7 +340,7 @@ class AuthWithToken(AuthTest):
def test_auth_scoped_token_bad_project_without_debug(self):
"""Authenticating with an invalid project fails."""
- # Bug 1379952 reports poor user feedback, even in debug mode,
+ # Bug 1379952 reports poor user feedback, even in insecure_debug mode,
# when the user accidentally passes a project name as an ID.
# This test intentionally does exactly that.
body_dict = _build_user_auth(
@@ -316,8 +348,8 @@ class AuthWithToken(AuthTest):
password=self.user_foo['password'],
tenant_id=self.tenant_bar['name'])
- # with debug disabled, authentication failure details are suppressed.
- self.config_fixture.config(debug=False)
+ # with insecure_debug disabled (the default), authentication failure
+ # details are suppressed.
e = self.assertRaises(
exception.Unauthorized,
self.controller.authenticate,
@@ -336,9 +368,9 @@ class AuthWithToken(AuthTest):
self.tenant_bar['id'],
self.role_member['id'])
# Now create a group role for this user as well
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain1 = unit.new_domain_ref()
self.resource_api.create_domain(domain1['id'], domain1)
- new_group = {'domain_id': domain1['id'], 'name': uuid.uuid4().hex}
+ new_group = unit.new_group_ref(domain_id=domain1['id'])
new_group = self.identity_api.create_group(new_group)
self.identity_api.add_user_to_group(self.user_foo['id'],
new_group['id'])
@@ -428,10 +460,10 @@ class AuthWithToken(AuthTest):
def test_deleting_role_revokes_token(self):
role_controller = assignment.controllers.Role()
- project1 = {'id': 'Project1', 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
+ project1 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
self.resource_api.create_project(project1['id'], project1)
- role_one = {'id': 'role_one', 'name': uuid.uuid4().hex}
+ role_one = unit.new_role_ref(id='role_one')
self.role_api.create_role(role_one['id'], role_one)
self.assignment_api.add_role_to_user_and_project(
self.user_foo['id'], project1['id'], role_one['id'])
@@ -464,12 +496,10 @@ class AuthWithToken(AuthTest):
no_context = {}
admin_context = dict(is_admin=True, query_string={})
- project = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID}
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
self.resource_api.create_project(project['id'], project)
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
self.assignment_api.add_role_to_user_and_project(
self.user_foo['id'], project['id'], role['id'])
@@ -642,6 +672,27 @@ class AuthWithToken(AuthTest):
token_id=token_2_id)
+class FernetAuthWithToken(AuthWithToken):
+ def config_overrides(self):
+ super(FernetAuthWithToken, self).config_overrides()
+ self.config_fixture.config(group='token', provider='fernet')
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+
+ def test_token_auth_with_binding(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
+ body_dict = _build_user_auth()
+ self.assertRaises(exception.NotImplemented,
+ self.controller.authenticate,
+ self.context_with_remote_user,
+ body_dict)
+
+ def test_revoke_with_no_audit_info(self):
+ self.skipTest('Fernet with v2.0 and revocation is broken')
+
+ def test_deleting_role_revokes_token(self):
+ self.skipTest('Fernet with v2.0 and revocation is broken')
+
+
class AuthWithPasswordCredentials(AuthTest):
def test_auth_invalid_user(self):
"""Verify exception is raised if invalid user."""
@@ -682,7 +733,7 @@ class AuthWithPasswordCredentials(AuthTest):
{}, body_dict)
def test_authenticate_blank_password_credentials(self):
- """Sending empty dict as passwordCredentials raises a 400 error."""
+ """Sending empty dict as passwordCredentials raises 400 Bad Requset."""
body_dict = {'passwordCredentials': {}, 'tenantName': 'demo'}
self.assertRaises(exception.ValidationError,
self.controller.authenticate,
@@ -708,27 +759,16 @@ class AuthWithPasswordCredentials(AuthTest):
# user in auth data is from the new default domain.
# 1) Create a new domain.
- new_domain_id = uuid.uuid4().hex
- new_domain = {
- 'description': uuid.uuid4().hex,
- 'enabled': True,
- 'id': new_domain_id,
- 'name': uuid.uuid4().hex,
- }
+ new_domain = unit.new_domain_ref()
+ new_domain_id = new_domain['id']
self.resource_api.create_domain(new_domain_id, new_domain)
# 2) Create user "foo" in new domain with different password than
# default-domain foo.
- new_user_password = uuid.uuid4().hex
- new_user = {
- 'name': self.user_foo['name'],
- 'domain_id': new_domain_id,
- 'password': new_user_password,
- 'email': 'foo@bar2.com',
- }
-
- new_user = self.identity_api.create_user(new_user)
+ new_user = unit.create_user(self.identity_api,
+ name=self.user_foo['name'],
+ domain_id=new_domain_id)
# 3) Update the default_domain_id config option to the new domain
@@ -739,7 +779,7 @@ class AuthWithPasswordCredentials(AuthTest):
body_dict = _build_user_auth(
username=self.user_foo['name'],
- password=new_user_password)
+ password=new_user['password'])
# The test is successful if this doesn't raise, so no need to assert.
self.controller.authenticate({}, body_dict)
@@ -856,7 +896,16 @@ class AuthWithTrust(AuthTest):
token_id=token_id,
token_data=self.token_provider_api.validate_token(token_id))
auth_context = authorization.token_to_auth_context(token_ref)
- return {'environment': {authorization.AUTH_CONTEXT_ENV: auth_context},
+ # NOTE(gyee): if public_endpoint and admin_endpoint are not set, which
+ # is the default, the base url will be constructed from the environment
+ # variables wsgi.url_scheme, SERVER_NAME, SERVER_PORT, and SCRIPT_NAME.
+ # We have to set them in the context so the base url can be constructed
+ # accordingly.
+ return {'environment': {authorization.AUTH_CONTEXT_ENV: auth_context,
+ 'wsgi.url_scheme': 'http',
+ 'SCRIPT_NAME': '/v3',
+ 'SERVER_PORT': '80',
+ 'SERVER_NAME': HOST},
'token_id': token_id,
'host_url': HOST_URL}
@@ -945,8 +994,9 @@ class AuthWithTrust(AuthTest):
expires_at="2010-06-04T08:44:31.999999Z")
def test_create_trust_without_project_id(self):
- """Verify that trust can be created without project id and
- token can be generated with that trust.
+ """Verify that trust can be created without project id.
+
+ Also, token can be generated with that trust.
"""
unscoped_token = self.get_unscoped_token(self.trustor['name'])
context = self._create_auth_context(
@@ -977,9 +1027,7 @@ class AuthWithTrust(AuthTest):
self.assertIn(role['id'], role_ids)
def test_get_trust_without_auth_context(self):
- """Verify that a trust cannot be retrieved when the auth context is
- missing.
- """
+ """Verify a trust cannot be retrieved if auth context is missing."""
unscoped_token = self.get_unscoped_token(self.trustor['name'])
context = self._create_auth_context(
unscoped_token['access']['token']['id'])
@@ -1001,8 +1049,6 @@ class AuthWithTrust(AuthTest):
token_user = auth_response['access']['user']
self.assertEqual(token_user['id'], new_trust['trustee_user_id'])
- # TODO(ayoung): Endpoints
-
def test_create_trust_impersonation(self):
new_trust = self.create_trust(self.sample_data, self.trustor['name'])
self.assertEqual(self.trustor['id'], new_trust['trustor_user_id'])
@@ -1131,7 +1177,7 @@ class AuthWithTrust(AuthTest):
request_body = _build_user_auth(token={'id': trust_token_id},
tenant_id=self.tenant_bar['id'])
self.assertRaises(
- exception.Forbidden,
+ exception.Unauthorized,
self.controller.authenticate, {}, request_body)
def test_delete_trust_revokes_token(self):
@@ -1211,35 +1257,6 @@ class AuthWithTrust(AuthTest):
new_trust['id'])['trust']
self.assertEqual(3, trust['remaining_uses'])
- def test_v2_trust_token_contains_trustor_user_id_and_impersonation(self):
- new_trust = self.create_trust(self.sample_data, self.trustor['name'])
- auth_response = self.fetch_v2_token_from_trust(new_trust)
-
- self.assertEqual(new_trust['trustee_user_id'],
- auth_response['access']['trust']['trustee_user_id'])
- self.assertEqual(new_trust['trustor_user_id'],
- auth_response['access']['trust']['trustor_user_id'])
- self.assertEqual(new_trust['impersonation'],
- auth_response['access']['trust']['impersonation'])
- self.assertEqual(new_trust['id'],
- auth_response['access']['trust']['id'])
-
- validate_response = self.controller.validate_token(
- context=dict(is_admin=True, query_string={}),
- token_id=auth_response['access']['token']['id'])
- self.assertEqual(
- new_trust['trustee_user_id'],
- validate_response['access']['trust']['trustee_user_id'])
- self.assertEqual(
- new_trust['trustor_user_id'],
- validate_response['access']['trust']['trustor_user_id'])
- self.assertEqual(
- new_trust['impersonation'],
- validate_response['access']['trust']['impersonation'])
- self.assertEqual(
- new_trust['id'],
- validate_response['access']['trust']['id'])
-
def disable_user(self, user):
user['enabled'] = False
self.identity_api.update_user(user['id'], user)
@@ -1328,34 +1345,21 @@ class AuthCatalog(unit.SQLDriverOverrides, AuthTest):
def _create_endpoints(self):
def create_region(**kwargs):
- ref = {'id': uuid.uuid4().hex}
- ref.update(kwargs)
+ ref = unit.new_region_ref(**kwargs)
self.catalog_api.create_region(ref)
return ref
def create_endpoint(service_id, region, **kwargs):
- id_ = uuid.uuid4().hex
- ref = {
- 'id': id_,
- 'interface': 'public',
- 'region_id': region,
- 'service_id': service_id,
- 'url': 'http://localhost/%s' % uuid.uuid4().hex,
- }
- ref.update(kwargs)
- self.catalog_api.create_endpoint(id_, ref)
- return ref
+ endpoint = unit.new_endpoint_ref(region_id=region,
+ service_id=service_id, **kwargs)
+
+ self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ return endpoint
# Create a service for use with the endpoints.
def create_service(**kwargs):
- id_ = uuid.uuid4().hex
- ref = {
- 'id': id_,
- 'name': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- }
- ref.update(kwargs)
- self.catalog_api.create_service(id_, ref)
+ ref = unit.new_service_ref(**kwargs)
+ self.catalog_api.create_service(ref['id'], ref)
return ref
enabled_service_ref = create_service(enabled=True)
diff --git a/keystone-moon/keystone/tests/unit/test_auth_plugin.py b/keystone-moon/keystone/tests/unit/test_auth_plugin.py
index 8dd22aa8..f0862ed6 100644
--- a/keystone-moon/keystone/tests/unit/test_auth_plugin.py
+++ b/keystone-moon/keystone/tests/unit/test_auth_plugin.py
@@ -183,7 +183,7 @@ class TestMapped(unit.TestCase):
# make sure Mapped plugin got invoked with the correct payload
((context, auth_payload, auth_context),
kwargs) = authenticate.call_args
- self.assertEqual(auth_payload['protocol'], method_name)
+ self.assertEqual(method_name, auth_payload['protocol'])
def test_supporting_multiple_methods(self):
for method_name in ['saml2', 'openid', 'x509']:
diff --git a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
index 6c2181aa..f72cad63 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py
@@ -18,6 +18,7 @@ from six.moves import range
from testtools import matchers
from keystone import exception
+from keystone.tests import unit
class PolicyAssociationTests(object):
@@ -51,11 +52,11 @@ class PolicyAssociationTests(object):
5 - region 2, Service 0
"""
-
def new_endpoint(region_id, service_id):
- endpoint = {'id': uuid.uuid4().hex, 'interface': 'test',
- 'region_id': region_id, 'service_id': service_id,
- 'url': '/url'}
+ endpoint = unit.new_endpoint_ref(interface='test',
+ region_id=region_id,
+ service_id=service_id,
+ url='/url')
self.endpoint.append(self.catalog_api.create_endpoint(
endpoint['id'], endpoint))
@@ -63,18 +64,18 @@ class PolicyAssociationTests(object):
self.endpoint = []
self.service = []
self.region = []
+
+ parent_region_id = None
for i in range(3):
- policy = {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex,
- 'blob': {'data': uuid.uuid4().hex}}
+ policy = unit.new_policy_ref()
self.policy.append(self.policy_api.create_policy(policy['id'],
policy))
- service = {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex}
+ service = unit.new_service_ref()
self.service.append(self.catalog_api.create_service(service['id'],
service))
- region = {'id': uuid.uuid4().hex, 'description': uuid.uuid4().hex}
- # Link the 3 regions together as a hierarchy, [0] at the top
- if i != 0:
- region['parent_region_id'] = self.region[i - 1]['id']
+ region = unit.new_region_ref(parent_region_id=parent_region_id)
+ # Link the regions together as a hierarchy, [0] at the top
+ parent_region_id = region['id']
self.region.append(self.catalog_api.create_region(region))
new_endpoint(self.region[0]['id'], self.service[0]['id'])
diff --git a/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py b/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py
index 6b691e5a..e6635e18 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py
@@ -19,6 +19,7 @@ from testtools import matchers
from keystone.common import sql
from keystone.identity.mapping_backends import mapping
+from keystone.tests import unit
from keystone.tests.unit import identity_mapping as mapping_sql
from keystone.tests.unit import test_backend_sql
@@ -42,9 +43,9 @@ class SqlIDMapping(test_backend_sql.SqlTests):
def load_sample_data(self):
self.addCleanup(self.clean_sample_data)
- domainA = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domainA = unit.new_domain_ref()
self.domainA = self.resource_api.create_domain(domainA['id'], domainA)
- domainB = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domainB = unit.new_domain_ref()
self.domainB = self.resource_api.create_domain(domainB['id'], domainB)
def clean_sample_data(self):
diff --git a/keystone-moon/keystone/tests/unit/test_backend_kvs.py b/keystone-moon/keystone/tests/unit/test_backend_kvs.py
index 7406192a..36af1c36 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_kvs.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_kvs.py
@@ -14,20 +14,17 @@
import datetime
import uuid
-from oslo_config import cfg
from oslo_utils import timeutils
import six
from keystone.common import utils
from keystone import exception
from keystone.tests import unit
-from keystone.tests.unit import test_backend
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit.token import test_backends as token_tests
-CONF = cfg.CONF
-
-
-class KvsToken(unit.TestCase, test_backend.TokenTests):
+class KvsToken(unit.TestCase, token_tests.TokenTests):
def setUp(self):
super(KvsToken, self).setUp()
self.load_backends()
@@ -103,64 +100,11 @@ class KvsToken(unit.TestCase, test_backend.TokenTests):
self.assertEqual(expected_user_token_list, user_token_list)
-class KvsCatalog(unit.TestCase, test_backend.CatalogTests):
- def setUp(self):
- super(KvsCatalog, self).setUp()
- self.load_backends()
- self._load_fake_catalog()
-
- def config_overrides(self):
- super(KvsCatalog, self).config_overrides()
- self.config_fixture.config(group='catalog', driver='kvs')
-
- def _load_fake_catalog(self):
- self.catalog_foobar = self.catalog_api.driver._create_catalog(
- 'foo', 'bar',
- {'RegionFoo': {'service_bar': {'foo': 'bar'}}})
-
- def test_get_catalog_404(self):
- # FIXME(dolph): this test should be moved up to test_backend
- # FIXME(dolph): exceptions should be UserNotFound and ProjectNotFound
- self.assertRaises(exception.NotFound,
- self.catalog_api.get_catalog,
- uuid.uuid4().hex,
- 'bar')
-
- self.assertRaises(exception.NotFound,
- self.catalog_api.get_catalog,
- 'foo',
- uuid.uuid4().hex)
-
- def test_get_catalog(self):
- catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
- self.assertDictEqual(catalog_ref, self.catalog_foobar)
-
- def test_get_catalog_endpoint_disabled(self):
- # This test doesn't apply to KVS because with the KVS backend the
- # application creates the catalog (including the endpoints) for each
- # user and project. Whether endpoints are enabled or disabled isn't
- # a consideration.
- f = super(KvsCatalog, self).test_get_catalog_endpoint_disabled
- self.assertRaises(exception.NotFound, f)
-
- def test_get_v3_catalog_endpoint_disabled(self):
- # There's no need to have disabled endpoints in the kvs catalog. Those
- # endpoints should just be removed from the store. This just tests
- # what happens currently when the super impl is called.
- f = super(KvsCatalog, self).test_get_v3_catalog_endpoint_disabled
- self.assertRaises(exception.NotFound, f)
-
- def test_list_regions_filtered_by_parent_region_id(self):
- self.skipTest('KVS backend does not support hints')
-
- def test_service_filtering(self):
- self.skipTest("kvs backend doesn't support filtering")
-
-
class KvsTokenCacheInvalidation(unit.TestCase,
- test_backend.TokenCacheInvalidation):
+ token_tests.TokenCacheInvalidation):
def setUp(self):
super(KvsTokenCacheInvalidation, self).setUp()
+ self.useFixture(database.Database(self.sql_driver_version_overrides))
self.load_backends()
self._create_test_data()
diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap.py b/keystone-moon/keystone/tests/unit/test_backend_ldap.py
index d96ec376..cf618633 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_ldap.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_ldap.py
@@ -20,11 +20,15 @@ import uuid
import ldap
import mock
from oslo_config import cfg
+from oslo_log import versionutils
+from oslotest import mockpatch
import pkg_resources
+from six.moves import http_client
from six.moves import range
from testtools import matchers
from keystone.common import cache
+from keystone.common import driver_hints
from keystone.common import ldap as common_ldap
from keystone.common.ldap import core as common_ldap_core
from keystone import exception
@@ -32,11 +36,14 @@ from keystone import identity
from keystone.identity.mapping_backends import mapping as map
from keystone import resource
from keystone.tests import unit
+from keystone.tests.unit.assignment import test_backends as assignment_tests
from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.identity import test_backends as identity_tests
from keystone.tests.unit import identity_mapping as mapping_sql
from keystone.tests.unit.ksfixtures import database
from keystone.tests.unit.ksfixtures import ldapdb
-from keystone.tests.unit import test_backend
+from keystone.tests.unit.resource import test_backends as resource_tests
+from keystone.tests.unit.utils import wip
CONF = cfg.CONF
@@ -115,7 +122,9 @@ def create_group_container(identity_api):
('ou', ['Groups'])])
-class BaseLDAPIdentity(test_backend.IdentityTests):
+class BaseLDAPIdentity(identity_tests.IdentityTests,
+ assignment_tests.AssignmentTests,
+ resource_tests.ResourceTests):
def setUp(self):
super(BaseLDAPIdentity, self).setUp()
@@ -123,6 +132,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.load_backends()
self.load_fixtures(default_fixtures)
+ self.config_fixture.config(group='os_inherit', enabled=False)
def _get_domain_fixture(self):
"""Domains in LDAP are read-only, so just return the static one."""
@@ -141,6 +151,13 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
return config_files
+ def new_user_ref(self, domain_id, project_id=None, **kwargs):
+ ref = unit.new_user_ref(domain_id=domain_id, project_id=project_id,
+ **kwargs)
+ if 'id' not in kwargs:
+ del ref['id']
+ return ref
+
def get_user_enabled_vals(self, user):
user_dn = (
self.identity_api.driver.user._id_to_dn_string(user['id']))
@@ -156,17 +173,13 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
return None
def test_build_tree(self):
- """Regression test for building the tree names
- """
+ """Regression test for building the tree names."""
user_api = identity.backends.ldap.UserApi(CONF)
self.assertTrue(user_api)
self.assertEqual("ou=Users,%s" % CONF.ldap.suffix, user_api.tree_dn)
def test_configurable_allowed_user_actions(self):
- user = {'name': u'fäké1',
- 'password': u'fäképass1',
- 'domain_id': CONF.identity.default_domain_id,
- 'tenants': ['bar']}
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user)
self.identity_api.get_user(user['id'])
@@ -185,10 +198,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
driver.user.allow_update = False
driver.user.allow_delete = False
- user = {'name': u'fäké1',
- 'password': u'fäképass1',
- 'domain_id': CONF.identity.default_domain_id,
- 'tenants': ['bar']}
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
self.assertRaises(exception.ForbiddenAction,
self.identity_api.create_user,
user)
@@ -215,7 +225,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_user_filter(self):
user_ref = self.identity_api.get_user(self.user_foo['id'])
self.user_foo.pop('password')
- self.assertDictEqual(user_ref, self.user_foo)
+ self.assertDictEqual(self.user_foo, user_ref)
driver = self.identity_api._select_identity_driver(
user_ref['domain_id'])
@@ -227,6 +237,20 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.identity_api.get_user,
self.user_foo['id'])
+ def test_list_users_by_name_and_with_filter(self):
+ # confirm that the user is not exposed when it does not match the
+ # filter setting in conf even if it is requested by name in user list
+ hints = driver_hints.Hints()
+ hints.add_filter('name', self.user_foo['name'])
+ domain_id = self.user_foo['domain_id']
+ driver = self.identity_api._select_identity_driver(domain_id)
+ driver.user.ldap_filter = ('(|(cn=%s)(cn=%s))' %
+ (self.user_sna['id'], self.user_two['id']))
+ users = self.identity_api.list_users(
+ domain_scope=self._set_domain_scope(domain_id),
+ hints=hints)
+ self.assertEqual(0, len(users))
+
def test_remove_role_grant_from_user_and_project(self):
self.assignment_api.create_grant(user_id=self.user_foo['id'],
project_id=self.tenant_baz['id'],
@@ -234,7 +258,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
roles_ref = self.assignment_api.list_grants(
user_id=self.user_foo['id'],
project_id=self.tenant_baz['id'])
- self.assertDictEqual(roles_ref[0], self.role_member)
+ self.assertDictEqual(self.role_member, roles_ref[0])
self.assignment_api.delete_grant(user_id=self.user_foo['id'],
project_id=self.tenant_baz['id'],
@@ -251,11 +275,9 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_get_and_remove_role_grant_by_group_and_project(self):
new_domain = self._get_domain_fixture()
- new_group = {'domain_id': new_domain['id'],
- 'name': uuid.uuid4().hex}
+ new_group = unit.new_group_ref(domain_id=new_domain['id'])
new_group = self.identity_api.create_group(new_group)
- new_user = {'name': 'new_user', 'enabled': True,
- 'domain_id': new_domain['id']}
+ new_user = self.new_user_ref(domain_id=new_domain['id'])
new_user = self.identity_api.create_user(new_user)
self.identity_api.add_user_to_group(new_user['id'],
new_group['id'])
@@ -273,7 +295,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
group_id=new_group['id'],
project_id=self.tenant_bar['id'])
self.assertNotEmpty(roles_ref)
- self.assertDictEqual(roles_ref[0], self.role_member)
+ self.assertDictEqual(self.role_member, roles_ref[0])
self.assignment_api.delete_grant(group_id=new_group['id'],
project_id=self.tenant_bar['id'],
@@ -289,7 +311,44 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
role_id='member')
def test_get_and_remove_role_grant_by_group_and_domain(self):
- self.skipTest('N/A: LDAP does not support multiple domains')
+ # TODO(henry-nash): We should really rewrite the tests in
+ # unit.resource.test_backends to be more flexible as to where the
+ # domains are sourced from, so that we would not need to override such
+ # tests here. This is raised as bug 1373865.
+ new_domain = self._get_domain_fixture()
+ new_group = unit.new_group_ref(domain_id=new_domain['id'],)
+ new_group = self.identity_api.create_group(new_group)
+ new_user = self.new_user_ref(domain_id=new_domain['id'])
+ new_user = self.identity_api.create_user(new_user)
+ self.identity_api.add_user_to_group(new_user['id'],
+ new_group['id'])
+
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+
+ self.assignment_api.create_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertDictEqual(self.role_member, roles_ref[0])
+
+ self.assignment_api.delete_grant(group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
+ roles_ref = self.assignment_api.list_grants(
+ group_id=new_group['id'],
+ domain_id=new_domain['id'])
+ self.assertEqual(0, len(roles_ref))
+ self.assertRaises(exception.NotFound,
+ self.assignment_api.delete_grant,
+ group_id=new_group['id'],
+ domain_id=new_domain['id'],
+ role_id='member')
def test_get_role_assignment_by_domain_not_found(self):
self.skipTest('N/A: LDAP does not support multiple domains')
@@ -327,10 +386,12 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_delete_group_with_user_project_domain_links(self):
self.skipTest('N/A: LDAP does not support multiple domains')
+ def test_list_role_assignment_containing_names(self):
+ self.skipTest('N/A: LDAP does not support multiple domains')
+
def test_list_projects_for_user(self):
domain = self._get_domain_fixture()
- user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': domain['id'], 'enabled': True}
+ user1 = self.new_user_ref(domain_id=domain['id'])
user1 = self.identity_api.create_user(user1)
user_projects = self.assignment_api.list_projects_for_user(user1['id'])
self.assertThat(user_projects, matchers.HasLength(0))
@@ -347,11 +408,10 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.assertThat(user_projects, matchers.HasLength(2))
# Now, check number of projects through groups
- user2 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': domain['id'], 'enabled': True}
+ user2 = self.new_user_ref(domain_id=domain['id'])
user2 = self.identity_api.create_user(user2)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = self.identity_api.create_group(group1)
self.identity_api.add_user_to_group(user2['id'], group1['id'])
@@ -377,12 +437,11 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_list_projects_for_user_and_groups(self):
domain = self._get_domain_fixture()
# Create user1
- user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': domain['id'], 'enabled': True}
+ user1 = self.new_user_ref(domain_id=domain['id'])
user1 = self.identity_api.create_user(user1)
# Create new group for user1
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = self.identity_api.create_group(group1)
# Add user1 to group1
@@ -412,20 +471,17 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_list_projects_for_user_with_grants(self):
domain = self._get_domain_fixture()
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': domain['id']}
+ new_user = self.new_user_ref(domain_id=domain['id'])
new_user = self.identity_api.create_user(new_user)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = self.identity_api.create_group(group1)
- group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group2 = unit.new_group_ref(domain_id=domain['id'])
group2 = self.identity_api.create_group(group2)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
+ project1 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project1['id'], project1)
- project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
+ project2 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project2['id'], project2)
self.identity_api.add_user_to_group(new_user['id'],
@@ -496,14 +552,11 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_list_role_assignments_unfiltered(self):
new_domain = self._get_domain_fixture()
- new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.new_user_ref(domain_id=new_domain['id'])
new_user = self.identity_api.create_user(new_user)
- new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
+ new_group = unit.new_group_ref(domain_id=new_domain['id'])
new_group = self.identity_api.create_group(new_group)
- new_project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': new_domain['id']}
+ new_project = unit.new_project_ref(domain_id=new_domain['id'])
self.resource_api.create_project(new_project['id'], new_project)
# First check how many role grant already exist
@@ -520,13 +573,6 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
after_assignments = len(self.assignment_api.list_role_assignments())
self.assertEqual(existing_assignments + 2, after_assignments)
- def test_list_role_assignments_filtered_by_role(self):
- # Domain roles are not supported by the LDAP Assignment backend
- self.assertRaises(
- exception.NotImplemented,
- super(BaseLDAPIdentity, self).
- test_list_role_assignments_filtered_by_role)
-
def test_list_role_assignments_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
self.ldapdb.clear()
@@ -534,12 +580,9 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.load_fixtures(default_fixtures)
new_domain = self._get_domain_fixture()
- new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': new_domain['id']}
+ new_user = self.new_user_ref(domain_id=new_domain['id'])
new_user = self.identity_api.create_user(new_user)
- new_project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': new_domain['id']}
+ new_project = unit.new_project_ref(domain_id=new_domain['id'])
self.resource_api.create_project(new_project['id'], new_project)
self.assignment_api.create_grant(user_id=new_user['id'],
project_id=new_project['id'],
@@ -558,8 +601,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.load_backends()
self.load_fixtures(default_fixtures)
- user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': test_backend.DEFAULT_DOMAIN_ID}
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user)
self.assignment_api.add_user_to_project(self.tenant_baz['id'],
@@ -582,10 +624,8 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
are returned.
"""
-
# Create a group
- group = dict(name=uuid.uuid4().hex,
- domain_id=CONF.identity.default_domain_id)
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
group_id = self.identity_api.create_group(group)['id']
# Create a couple of users and add them to the group.
@@ -617,10 +657,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_list_group_members_when_no_members(self):
# List group members when there is no member in the group.
# No exception should be raised.
- group = {
- 'domain_id': CONF.identity.default_domain_id,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex}
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
group = self.identity_api.create_group(group)
# If this doesn't raise, then the test is successful.
@@ -633,8 +670,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.load_fixtures(default_fixtures)
# Create a group
- group = dict(name=uuid.uuid4().hex,
- domain_id=CONF.identity.default_domain_id)
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
group_id = self.identity_api.create_group(group)['id']
# Create a user
@@ -651,30 +687,23 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.assertNotIn(dumb_id, user_ids)
def test_list_domains(self):
+ # We have more domains here than the parent class, check for the
+ # correct number of domains for the multildap backend configs
+ domain1 = unit.new_domain_ref()
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ self.resource_api.create_domain(domain2['id'], domain2)
domains = self.resource_api.list_domains()
- self.assertEqual(
- [resource.calc_default_domain()],
- domains)
-
- def test_list_domains_non_default_domain_id(self):
- # If change the default_domain_id, the ID of the default domain
- # returned by list_domains changes is the new default_domain_id.
-
- new_domain_id = uuid.uuid4().hex
- self.config_fixture.config(group='identity',
- default_domain_id=new_domain_id)
-
- domains = self.resource_api.list_domains()
-
- self.assertEqual(new_domain_id, domains[0]['id'])
+ self.assertEqual(7, len(domains))
+ domain_ids = []
+ for domain in domains:
+ domain_ids.append(domain.get('id'))
+ self.assertIn(CONF.identity.default_domain_id, domain_ids)
+ self.assertIn(domain1['id'], domain_ids)
+ self.assertIn(domain2['id'], domain_ids)
def test_authenticate_requires_simple_bind(self):
- user = {
- 'name': 'NO_META',
- 'domain_id': test_backend.DEFAULT_DOMAIN_ID,
- 'password': 'no_meta2',
- 'enabled': True,
- }
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user)
self.assignment_api.add_user_to_project(self.tenant_baz['id'],
user['id'])
@@ -689,34 +718,54 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
user_id=user['id'],
password=None)
- # (spzala)The group and domain crud tests below override the standard ones
- # in test_backend.py so that we can exclude the update name test, since we
- # do not yet support the update of either group or domain names with LDAP.
- # In the tests below, the update is demonstrated by updating description.
- # Refer to bug 1136403 for more detail.
- def test_group_crud(self):
- group = {
- 'domain_id': CONF.identity.default_domain_id,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex}
+ # The group and domain CRUD tests below override the standard ones in
+ # unit.identity.test_backends.py so that we can exclude the update name
+ # test, since we do not (and will not) support the update of either group
+ # or domain names with LDAP. In the tests below, the update is tested by
+ # updating description.
+ @mock.patch.object(versionutils, 'report_deprecated_feature')
+ def test_group_crud(self, mock_deprecator):
+ # NOTE(stevemar): As of the Mitaka release, we now check for calls that
+ # the LDAP write functionality has been deprecated.
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
group = self.identity_api.create_group(group)
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("create_group for the LDAP identity backend", args[1])
+
group_ref = self.identity_api.get_group(group['id'])
- self.assertDictEqual(group_ref, group)
+ self.assertDictEqual(group, group_ref)
group['description'] = uuid.uuid4().hex
self.identity_api.update_group(group['id'], group)
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("update_group for the LDAP identity backend", args[1])
+
group_ref = self.identity_api.get_group(group['id'])
- self.assertDictEqual(group_ref, group)
+ self.assertDictEqual(group, group_ref)
self.identity_api.delete_group(group['id'])
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("delete_group for the LDAP identity backend", args[1])
self.assertRaises(exception.GroupNotFound,
self.identity_api.get_group,
group['id'])
+ @mock.patch.object(versionutils, 'report_deprecated_feature')
+ def test_add_remove_user_group_deprecated(self, mock_deprecator):
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group = self.identity_api.create_group(group)
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user = self.identity_api.create_user(user)
+ self.identity_api.add_user_to_group(user['id'], group['id'])
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("add_user_to_group for the LDAP identity", args[1])
+
+ self.identity_api.remove_user_from_group(user['id'], group['id'])
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("remove_user_from_group for the LDAP identity", args[1])
+
@unit.skip_if_cache_disabled('identity')
def test_cache_layer_group_crud(self):
- group = {
- 'domain_id': CONF.identity.default_domain_id,
- 'name': uuid.uuid4().hex}
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
group = self.identity_api.create_group(group)
# cache the result
group_ref = self.identity_api.get_group(group['id'])
@@ -731,9 +780,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.assertRaises(exception.GroupNotFound,
self.identity_api.get_group, group['id'])
- group = {
- 'domain_id': CONF.identity.default_domain_id,
- 'name': uuid.uuid4().hex}
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
group = self.identity_api.create_group(group)
# cache the result
self.identity_api.get_group(group['id'])
@@ -749,11 +796,8 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
CONF.identity.default_domain_id)
driver.user.attribute_ignore = ['enabled', 'email',
'tenants', 'tenantId']
- user = {'name': u'fäké1',
- 'password': u'fäképass1',
- 'domain_id': CONF.identity.default_domain_id,
- 'default_project_id': 'maps_to_none',
- }
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id,
+ project_id='maps_to_none')
# If this doesn't raise, then the test is successful.
user = self.identity_api.create_user(user)
@@ -765,9 +809,8 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
boolean_strings = ['TRUE', 'FALSE', 'true', 'false', 'True', 'False',
'TrUe' 'FaLse']
for name in boolean_strings:
- user = {
- 'name': name,
- 'domain_id': CONF.identity.default_domain_id}
+ user = self.new_user_ref(name=name,
+ domain_id=CONF.identity.default_domain_id)
user_ref = self.identity_api.create_user(user)
user_info = self.identity_api.get_user(user_ref['id'])
self.assertEqual(name, user_info['name'])
@@ -786,10 +829,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
driver.user.attribute_ignore = ['enabled', 'email',
'tenants', 'tenantId']
- user = {'name': u'fäké1',
- 'password': u'fäképass1',
- 'domain_id': CONF.identity.default_domain_id,
- }
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user_ref = self.identity_api.create_user(user)
@@ -818,19 +858,14 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
def test_user_id_comma(self):
"""Even if the user has a , in their ID, groups can be listed."""
-
# Create a user with a , in their ID
# NOTE(blk-u): the DN for this user is hard-coded in fakeldap!
# Since we want to fake up this special ID, we'll squirt this
# direct into the driver and bypass the manager layer.
user_id = u'Doe, John'
- user = {
- 'id': user_id,
- 'name': self.getUniqueString(),
- 'password': self.getUniqueString(),
- 'domain_id': CONF.identity.default_domain_id,
- }
+ user = self.new_user_ref(id=user_id,
+ domain_id=CONF.identity.default_domain_id)
user = self.identity_api.driver.create_user(user_id, user)
# Now we'll use the manager to discover it, which will create a
@@ -843,13 +878,8 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
break
# Create a group
- group_id = uuid.uuid4().hex
- group = {
- 'id': group_id,
- 'name': self.getUniqueString(prefix='tuidc'),
- 'description': self.getUniqueString(),
- 'domain_id': CONF.identity.default_domain_id,
- }
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
+ group_id = group['id']
group = self.identity_api.driver.create_group(group_id, group)
# Now we'll use the manager to discover it, which will create a
# Public ID for it.
@@ -870,21 +900,15 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.assertThat(ref_list, matchers.Equals([group]))
def test_user_id_comma_grants(self):
- """Even if the user has a , in their ID, can get user and group grants.
- """
-
+ """List user and group grants, even with a comma in the user's ID."""
# Create a user with a , in their ID
# NOTE(blk-u): the DN for this user is hard-coded in fakeldap!
# Since we want to fake up this special ID, we'll squirt this
# direct into the driver and bypass the manager layer
user_id = u'Doe, John'
- user = {
- 'id': user_id,
- 'name': self.getUniqueString(),
- 'password': self.getUniqueString(),
- 'domain_id': CONF.identity.default_domain_id,
- }
+ user = self.new_user_ref(id=user_id,
+ domain_id=CONF.identity.default_domain_id)
self.identity_api.driver.create_user(user_id, user)
# Now we'll use the manager to discover it, which will create a
@@ -943,8 +967,7 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
# There's no group fixture so create a group.
new_domain = self._get_domain_fixture()
- new_group = {'domain_id': new_domain['id'],
- 'name': uuid.uuid4().hex}
+ new_group = unit.new_group_ref(domain_id=new_domain['id'])
new_group = self.identity_api.create_group(new_group)
# Attempt to disable the group.
@@ -959,39 +982,55 @@ class BaseLDAPIdentity(test_backend.IdentityTests):
self.assertNotIn('enabled', group_info)
def test_project_enabled_ignored_disable_error(self):
- # When the server is configured so that the enabled attribute is
- # ignored for projects, projects cannot be disabled.
-
- self.config_fixture.config(group='ldap',
- project_attribute_ignore=['enabled'])
-
- # Need to re-load backends for the config change to take effect.
- self.load_backends()
-
- # Attempt to disable the project.
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.update_project,
- self.tenant_baz['id'], {'enabled': False})
-
- project_info = self.resource_api.get_project(self.tenant_baz['id'])
-
- # Unlike other entities, if 'enabled' is ignored then 'enabled' is
- # returned as part of the ref.
- self.assertIs(True, project_info['enabled'])
+ self.skipTest('Resource LDAP has been removed')
def test_list_role_assignment_by_domain(self):
"""Multiple domain assignments are not supported."""
self.assertRaises(
- (exception.Forbidden, exception.DomainNotFound),
+ (exception.Forbidden, exception.DomainNotFound,
+ exception.ValidationError),
super(BaseLDAPIdentity, self).test_list_role_assignment_by_domain)
def test_list_role_assignment_by_user_with_domain_group_roles(self):
"""Multiple domain assignments are not supported."""
self.assertRaises(
- (exception.Forbidden, exception.DomainNotFound),
+ (exception.Forbidden, exception.DomainNotFound,
+ exception.ValidationError),
super(BaseLDAPIdentity, self).
test_list_role_assignment_by_user_with_domain_group_roles)
+ def test_domain_crud(self):
+ self.skipTest('Resource LDAP has been removed')
+
+ def test_list_role_assignment_using_sourced_groups_with_domains(self):
+ """Multiple domain assignments are not supported."""
+ self.assertRaises(
+ (exception.Forbidden, exception.ValidationError,
+ exception.DomainNotFound),
+ super(BaseLDAPIdentity, self).
+ test_list_role_assignment_using_sourced_groups_with_domains)
+
+ def test_create_project_with_domain_id_and_without_parent_id(self):
+ """Multiple domains are not supported."""
+ self.assertRaises(
+ exception.ValidationError,
+ super(BaseLDAPIdentity, self).
+ test_create_project_with_domain_id_and_without_parent_id)
+
+ def test_create_project_with_domain_id_mismatch_to_parent_domain(self):
+ """Multiple domains are not supported."""
+ self.assertRaises(
+ exception.ValidationError,
+ super(BaseLDAPIdentity, self).
+ test_create_project_with_domain_id_mismatch_to_parent_domain)
+
+ def test_remove_foreign_assignments_when_deleting_a_domain(self):
+ """Multiple domains are not supported."""
+ self.assertRaises(
+ (exception.ValidationError, exception.DomainNotFound),
+ super(BaseLDAPIdentity,
+ self).test_remove_foreign_assignments_when_deleting_a_domain)
+
class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
@@ -1002,46 +1041,46 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
self.useFixture(database.Database())
super(LDAPIdentity, self).setUp()
_assert_backends(self,
- assignment='ldap',
+ assignment='sql',
identity='ldap',
- resource='ldap')
+ resource='sql')
def load_fixtures(self, fixtures):
# Override super impl since need to create group container.
create_group_container(self.identity_api)
super(LDAPIdentity, self).load_fixtures(fixtures)
+ def test_list_domains(self):
+ domains = self.resource_api.list_domains()
+ self.assertEqual([resource.calc_default_domain()], domains)
+
def test_configurable_allowed_project_actions(self):
domain = self._get_domain_fixture()
- tenant = {'id': u'fäké1', 'name': u'fäké1', 'enabled': True,
- 'domain_id': domain['id']}
- self.resource_api.create_project(u'fäké1', tenant)
- tenant_ref = self.resource_api.get_project(u'fäké1')
- self.assertEqual(u'fäké1', tenant_ref['id'])
+ project = unit.new_project_ref(domain_id=domain['id'])
+ project = self.resource_api.create_project(project['id'], project)
+ project_ref = self.resource_api.get_project(project['id'])
+ self.assertEqual(project['id'], project_ref['id'])
- tenant['enabled'] = False
- self.resource_api.update_project(u'fäké1', tenant)
+ project['enabled'] = False
+ self.resource_api.update_project(project['id'], project)
- self.resource_api.delete_project(u'fäké1')
+ self.resource_api.delete_project(project['id'])
self.assertRaises(exception.ProjectNotFound,
self.resource_api.get_project,
- u'fäké1')
+ project['id'])
def test_configurable_subtree_delete(self):
self.config_fixture.config(group='ldap', allow_subtree_delete=True)
self.load_backends()
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id}
+ project1 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
self.resource_api.create_project(project1['id'], project1)
- role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role1 = unit.new_role_ref()
self.role_api.create_role(role1['id'], role1)
- user1 = {'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'password': uuid.uuid4().hex,
- 'enabled': True}
+ user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user1 = self.identity_api.create_user(user1)
self.assignment_api.add_role_to_user_and_project(
@@ -1062,48 +1101,10 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
self.assertEqual(0, len(list))
def test_configurable_forbidden_project_actions(self):
- self.config_fixture.config(
- group='ldap', project_allow_create=False,
- project_allow_update=False, project_allow_delete=False)
- self.load_backends()
-
- domain = self._get_domain_fixture()
- tenant = {'id': u'fäké1', 'name': u'fäké1', 'domain_id': domain['id']}
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.create_project,
- u'fäké1',
- tenant)
-
- self.tenant_bar['enabled'] = False
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.update_project,
- self.tenant_bar['id'],
- self.tenant_bar)
- self.assertRaises(exception.ForbiddenAction,
- self.resource_api.delete_project,
- self.tenant_bar['id'])
+ self.skipTest('Resource LDAP has been removed')
def test_project_filter(self):
- tenant_ref = self.resource_api.get_project(self.tenant_bar['id'])
- self.assertDictEqual(tenant_ref, self.tenant_bar)
-
- self.config_fixture.config(group='ldap',
- project_filter='(CN=DOES_NOT_MATCH)')
- self.load_backends()
- # NOTE(morganfainberg): CONF.ldap.project_filter will not be
- # dynamically changed at runtime. This invalidate is a work-around for
- # the expectation that it is safe to change config values in tests that
- # could affect what the drivers would return up to the manager. This
- # solves this assumption when working with aggressive (on-create)
- # cache population.
- self.role_api.get_role.invalidate(self.role_api,
- self.role_member['id'])
- self.role_api.get_role(self.role_member['id'])
- self.resource_api.get_project.invalidate(self.resource_api,
- self.tenant_bar['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- self.tenant_bar['id'])
+ self.skipTest('Resource LDAP has been removed')
def test_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
@@ -1116,71 +1117,10 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
dumb_id)
def test_project_attribute_mapping(self):
- self.config_fixture.config(
- group='ldap', project_name_attribute='ou',
- project_desc_attribute='description',
- project_enabled_attribute='enabled')
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
- # NOTE(morganfainberg): CONF.ldap.project_name_attribute,
- # CONF.ldap.project_desc_attribute, and
- # CONF.ldap.project_enabled_attribute will not be
- # dynamically changed at runtime. This invalidate is a work-around for
- # the expectation that it is safe to change config values in tests that
- # could affect what the drivers would return up to the manager. This
- # solves this assumption when working with aggressive (on-create)
- # cache population.
- self.resource_api.get_project.invalidate(self.resource_api,
- self.tenant_baz['id'])
- tenant_ref = self.resource_api.get_project(self.tenant_baz['id'])
- self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
- self.assertEqual(self.tenant_baz['name'], tenant_ref['name'])
- self.assertEqual(
- self.tenant_baz['description'],
- tenant_ref['description'])
- self.assertEqual(self.tenant_baz['enabled'], tenant_ref['enabled'])
-
- self.config_fixture.config(group='ldap',
- project_name_attribute='description',
- project_desc_attribute='ou')
- self.load_backends()
- # NOTE(morganfainberg): CONF.ldap.project_name_attribute,
- # CONF.ldap.project_desc_attribute, and
- # CONF.ldap.project_enabled_attribute will not be
- # dynamically changed at runtime. This invalidate is a work-around for
- # the expectation that it is safe to change config values in tests that
- # could affect what the drivers would return up to the manager. This
- # solves this assumption when working with aggressive (on-create)
- # cache population.
- self.resource_api.get_project.invalidate(self.resource_api,
- self.tenant_baz['id'])
- tenant_ref = self.resource_api.get_project(self.tenant_baz['id'])
- self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
- self.assertEqual(self.tenant_baz['description'], tenant_ref['name'])
- self.assertEqual(self.tenant_baz['name'], tenant_ref['description'])
- self.assertEqual(self.tenant_baz['enabled'], tenant_ref['enabled'])
+ self.skipTest('Resource LDAP has been removed')
def test_project_attribute_ignore(self):
- self.config_fixture.config(
- group='ldap',
- project_attribute_ignore=['name', 'description', 'enabled'])
- self.ldapdb.clear()
- self.load_backends()
- self.load_fixtures(default_fixtures)
- # NOTE(morganfainberg): CONF.ldap.project_attribute_ignore will not be
- # dynamically changed at runtime. This invalidate is a work-around for
- # the expectation that it is safe to change configs values in tests
- # that could affect what the drivers would return up to the manager.
- # This solves this assumption when working with aggressive (on-create)
- # cache population.
- self.resource_api.get_project.invalidate(self.resource_api,
- self.tenant_baz['id'])
- tenant_ref = self.resource_api.get_project(self.tenant_baz['id'])
- self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
- self.assertNotIn('name', tenant_ref)
- self.assertNotIn('description', tenant_ref)
- self.assertNotIn('enabled', tenant_ref)
+ self.skipTest('Resource LDAP has been removed')
def test_user_enable_attribute_mask(self):
self.config_fixture.config(group='ldap', user_enabled_mask=2,
@@ -1189,8 +1129,7 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
self.load_backends()
self.load_fixtures(default_fixtures)
- user = {'name': u'fäké1', 'enabled': True,
- 'domain_id': CONF.identity.default_domain_id}
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user_ref = self.identity_api.create_user(user)
@@ -1237,14 +1176,12 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
self.load_backends()
self.load_fixtures(default_fixtures)
- user1 = {'name': u'fäké1', 'enabled': True,
- 'domain_id': CONF.identity.default_domain_id}
+ user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user2 = {'name': u'fäké2', 'enabled': False,
- 'domain_id': CONF.identity.default_domain_id}
+ user2 = self.new_user_ref(enabled=False,
+ domain_id=CONF.identity.default_domain_id)
- user3 = {'name': u'fäké3',
- 'domain_id': CONF.identity.default_domain_id}
+ user3 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
# Ensure that the LDAP attribute is False for a newly created
# enabled user.
@@ -1473,15 +1410,28 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
group='ldap',
user_additional_attribute_mapping=['description:name'])
self.load_backends()
- user = {
- 'name': 'EXTRA_ATTRIBUTES',
- 'password': 'extra',
- 'domain_id': CONF.identity.default_domain_id
- }
+ user = self.new_user_ref(name='EXTRA_ATTRIBUTES',
+ password='extra',
+ domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user)
dn, attrs = self.identity_api.driver.user._ldap_get(user['id'])
self.assertThat([user['name']], matchers.Equals(attrs['description']))
+ def test_user_description_attribute_mapping(self):
+ self.config_fixture.config(
+ group='ldap',
+ user_description_attribute='displayName')
+ self.load_backends()
+
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id,
+ displayName=uuid.uuid4().hex)
+ description = user['displayName']
+ user = self.identity_api.create_user(user)
+ res = self.identity_api.driver.user.get_all()
+
+ new_user = [u for u in res if u['id'] == user['id']][0]
+ self.assertThat(new_user['description'], matchers.Equals(description))
+
def test_user_extra_attribute_mapping_description_is_returned(self):
# Given a mapping like description:description, the description is
# returned.
@@ -1491,13 +1441,9 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
user_additional_attribute_mapping=['description:description'])
self.load_backends()
- description = uuid.uuid4().hex
- user = {
- 'name': uuid.uuid4().hex,
- 'description': description,
- 'password': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id
- }
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id,
+ description=uuid.uuid4().hex)
+ description = user['description']
user = self.identity_api.create_user(user)
res = self.identity_api.driver.user.get_all()
@@ -1551,52 +1497,17 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
'fake': 'invalid', 'invalid2': ''}
self.assertDictEqual(expected_dict, mapping)
-# TODO(henry-nash): These need to be removed when the full LDAP implementation
-# is submitted - see Bugs 1092187, 1101287, 1101276, 1101289
-
- def test_domain_crud(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True, 'description': uuid.uuid4().hex}
- self.assertRaises(exception.Forbidden,
+ def test_create_domain(self):
+ domain = unit.new_domain_ref()
+ self.assertRaises(exception.ValidationError,
self.resource_api.create_domain,
domain['id'],
domain)
- self.assertRaises(exception.Conflict,
- self.resource_api.create_domain,
- CONF.identity.default_domain_id,
- domain)
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
-
- domain['description'] = uuid.uuid4().hex
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.update_domain,
- domain['id'],
- domain)
- self.assertRaises(exception.Forbidden,
- self.resource_api.update_domain,
- CONF.identity.default_domain_id,
- domain)
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.delete_domain,
- domain['id'])
- self.assertRaises(exception.Forbidden,
- self.resource_api.delete_domain,
- CONF.identity.default_domain_id)
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
@unit.skip_if_no_multiple_domains_support
def test_create_domain_case_sensitivity(self):
# domains are read-only, so case sensitivity isn't an issue
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
+ ref = unit.new_domain_ref()
self.assertRaises(exception.Forbidden,
self.resource_api.create_domain,
ref['id'],
@@ -1624,22 +1535,18 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
# NOTE(topol): LDAP implementation does not currently support the
# updating of a project name so this method override
# provides a different update test
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'description': uuid.uuid4().hex,
- 'enabled': True,
- 'parent_id': None,
- 'is_domain': False}
- self.resource_api.create_project(project['id'], project)
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+
+ project = self.resource_api.create_project(project['id'], project)
project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project_ref, project)
+ self.assertDictEqual(project, project_ref)
project['description'] = uuid.uuid4().hex
self.resource_api.update_project(project['id'], project)
project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project_ref, project)
+ self.assertDictEqual(project, project_ref)
self.resource_api.delete_project(project['id'])
self.assertRaises(exception.ProjectNotFound,
@@ -1651,12 +1558,11 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
# NOTE(morganfainberg): LDAP implementation does not currently support
# updating project names. This method override provides a different
# update test.
- project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'description': uuid.uuid4().hex}
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
project_id = project['id']
# Create a project
- self.resource_api.create_project(project_id, project)
+ project = self.resource_api.create_project(project_id, project)
self.resource_api.get_project(project_id)
updated_project = copy.deepcopy(project)
updated_project['description'] = uuid.uuid4().hex
@@ -1700,70 +1606,10 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
self.resource_api.get_project,
project_id)
- def _assert_create_hierarchy_not_allowed(self):
- domain = self._get_domain_fixture()
-
- project1 = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': domain['id'],
- 'enabled': True,
- 'parent_id': None,
- 'is_domain': False}
- self.resource_api.create_project(project1['id'], project1)
-
- # Creating project2 under project1. LDAP will not allow
- # the creation of a project with parent_id being set
- project2 = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': domain['id'],
- 'enabled': True,
- 'parent_id': project1['id'],
- 'is_domain': False}
-
- self.assertRaises(exception.InvalidParentProject,
- self.resource_api.create_project,
- project2['id'],
- project2)
-
- # Now, we'll create project 2 with no parent
- project2['parent_id'] = None
- self.resource_api.create_project(project2['id'], project2)
-
- # Returning projects to be used across the tests
- return [project1, project2]
-
- def _assert_create_is_domain_project_not_allowed(self):
- """Tests that we can't create more than one project acting as domain.
-
- This method will be used at any test that require the creation of a
- project that act as a domain. LDAP does not support multiple domains
- and the only domain it has (default) is immutable.
- """
- domain = self._get_domain_fixture()
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': domain['id'],
- 'enabled': True,
- 'parent_id': None,
- 'is_domain': True}
-
- self.assertRaises(exception.ValidationError,
- self.resource_api.create_project,
- project['id'], project)
-
def test_update_is_domain_field(self):
domain = self._get_domain_fixture()
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': '',
- 'domain_id': domain['id'],
- 'enabled': True,
- 'parent_id': None,
- 'is_domain': False}
- self.resource_api.create_project(project['id'], project)
+ project = unit.new_project_ref(domain_id=domain['id'])
+ project = self.resource_api.create_project(project['id'], project)
# Try to update the is_domain field to True
project['is_domain'] = True
@@ -1772,97 +1618,87 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
project['id'], project)
def test_delete_is_domain_project(self):
- self._assert_create_is_domain_project_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_create_domain_under_regular_project_hierarchy_fails(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_create_not_is_domain_project_under_is_domain_hierarchy(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
- def test_create_is_domain_project(self):
- self._assert_create_is_domain_project_not_allowed()
+ def test_create_project_passing_is_domain_flag_true(self):
+ self.skipTest('Resource LDAP has been removed')
def test_create_project_with_parent_id_and_without_domain_id(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_check_leaf_projects(self):
- projects = self._assert_create_hierarchy_not_allowed()
- for project in projects:
- self.assertTrue(self.resource_api.is_leaf_project(project))
+ self.skipTest('Resource LDAP has been removed')
def test_list_projects_in_subtree(self):
- projects = self._assert_create_hierarchy_not_allowed()
- for project in projects:
- subtree_list = self.resource_api.list_projects_in_subtree(
- project['id'])
- self.assertEqual(0, len(subtree_list))
+ self.skipTest('Resource LDAP has been removed')
def test_list_projects_in_subtree_with_circular_reference(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_list_project_parents(self):
- projects = self._assert_create_hierarchy_not_allowed()
- for project in projects:
- parents_list = self.resource_api.list_project_parents(
- project['id'])
- self.assertEqual(0, len(parents_list))
+ self.skipTest('Resource LDAP has been removed')
+
+ def test_update_project_enabled_cascade(self):
+ self.skipTest('Resource LDAP has been removed')
+
+ def test_cannot_enable_cascade_with_parent_disabled(self):
+ self.skipTest('Resource LDAP has been removed')
def test_hierarchical_projects_crud(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_create_project_under_disabled_one(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_create_project_with_invalid_parent(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_create_leaf_project_with_invalid_domain(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_update_project_parent(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_enable_project_with_disabled_parent(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_disable_hierarchical_leaf_project(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_disable_hierarchical_not_leaf_project(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_delete_hierarchical_leaf_project(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_delete_hierarchical_not_leaf_project(self):
- self._assert_create_hierarchy_not_allowed()
+ self.skipTest('Resource LDAP has been removed')
def test_check_hierarchy_depth(self):
- projects = self._assert_create_hierarchy_not_allowed()
- for project in projects:
- depth = self._get_hierarchy_depth(project['id'])
- self.assertEqual(1, depth)
+ self.skipTest('Resource LDAP has been removed')
def test_multi_role_grant_by_user_group_on_project_domain(self):
# This is a partial implementation of the standard test that
- # is defined in test_backend.py. It omits both domain and
- # group grants. since neither of these are yet supported by
- # the ldap backend.
+ # is defined in unit.assignment.test_backends.py. It omits
+ # both domain and group grants. since neither of these are
+ # yet supported by the ldap backend.
role_list = []
for _ in range(2):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
role_list.append(role)
- user1 = {'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'password': uuid.uuid4().hex,
- 'enabled': True}
+ user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user1 = self.identity_api.create_user(user1)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id}
+ project1 = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
self.resource_api.create_project(project1['id'], project1)
self.assignment_api.add_role_to_user_and_project(
@@ -1947,7 +1783,7 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
expected_group_ids = []
numgroups = 3
for _ in range(numgroups):
- group = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group = unit.new_group_ref(domain_id=domain['id'])
group = self.identity_api.create_group(group)
expected_group_ids.append(group['id'])
# Fetch the test groups and ensure that they don't contain a dn.
@@ -1960,16 +1796,14 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
def test_list_groups_for_user_no_dn(self):
# Create a test user.
- user = {'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'password': uuid.uuid4().hex, 'enabled': True}
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user)
# Create some test groups and add the test user as a member.
domain = self._get_domain_fixture()
expected_group_ids = []
numgroups = 3
for _ in range(numgroups):
- group = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group = unit.new_group_ref(domain_id=domain['id'])
group = self.identity_api.create_group(group)
expected_group_ids.append(group['id'])
self.identity_api.add_user_to_group(user['id'], group['id'])
@@ -1987,9 +1821,7 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
CONF.identity.default_domain_id)
driver.user.id_attr = 'mail'
- user = {'name': u'fäké1',
- 'password': u'fäképass1',
- 'domain_id': CONF.identity.default_domain_id}
+ user = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user)
user_ref = self.identity_api.get_user(user['id'])
# 'email' attribute should've created because it is also being used
@@ -2083,6 +1915,35 @@ class LDAPIdentity(BaseLDAPIdentity, unit.TestCase):
self.assertEqual('Foo Bar', user_ref['name'])
+class LDAPLimitTests(unit.TestCase, identity_tests.LimitTests):
+ def setUp(self):
+ super(LDAPLimitTests, self).setUp()
+
+ self.useFixture(ldapdb.LDAPDatabase())
+ self.useFixture(database.Database(self.sql_driver_version_overrides))
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ identity_tests.LimitTests.setUp(self)
+ _assert_backends(self,
+ assignment='sql',
+ identity='ldap',
+ resource='sql')
+
+ def config_overrides(self):
+ super(LDAPLimitTests, self).config_overrides()
+ self.config_fixture.config(group='identity', driver='ldap')
+ self.config_fixture.config(group='identity',
+ list_limit=len(default_fixtures.USERS) - 1)
+
+ def config_files(self):
+ config_files = super(LDAPLimitTests, self).config_files()
+ config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
+ return config_files
+
+ def test_list_projects_filtered_and_limited(self):
+ self.skipTest("ldap for storing projects is deprecated")
+
+
class LDAPIdentityEnabledEmulation(LDAPIdentity):
def setUp(self):
super(LDAPIdentityEnabledEmulation, self).setUp()
@@ -2092,10 +1953,7 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity):
for obj in [self.tenant_bar, self.tenant_baz, self.user_foo,
self.user_two, self.user_badguy]:
obj.setdefault('enabled', True)
- _assert_backends(self,
- assignment='ldap',
- identity='ldap',
- resource='ldap')
+ _assert_backends(self, identity='ldap')
def load_fixtures(self, fixtures):
# Override super impl since need to create group container.
@@ -2110,60 +1968,62 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity):
def config_overrides(self):
super(LDAPIdentityEnabledEmulation, self).config_overrides()
self.config_fixture.config(group='ldap',
- user_enabled_emulation=True,
- project_enabled_emulation=True)
+ user_enabled_emulation=True)
def test_project_crud(self):
# NOTE(topol): LDAPIdentityEnabledEmulation will create an
# enabled key in the project dictionary so this
# method override handles this side-effect
- project = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'description': uuid.uuid4().hex,
- 'parent_id': None,
- 'is_domain': False}
-
- self.resource_api.create_project(project['id'], project)
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+
+ project = self.resource_api.create_project(project['id'], project)
project_ref = self.resource_api.get_project(project['id'])
# self.resource_api.create_project adds an enabled
# key with a value of True when LDAPIdentityEnabledEmulation
# is used so we now add this expected key to the project dictionary
project['enabled'] = True
- self.assertDictEqual(project_ref, project)
+ self.assertDictEqual(project, project_ref)
project['description'] = uuid.uuid4().hex
self.resource_api.update_project(project['id'], project)
project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project_ref, project)
+ self.assertDictEqual(project, project_ref)
self.resource_api.delete_project(project['id'])
self.assertRaises(exception.ProjectNotFound,
self.resource_api.get_project,
project['id'])
- def test_user_crud(self):
- user_dict = {
- 'domain_id': CONF.identity.default_domain_id,
- 'name': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex}
+ @mock.patch.object(versionutils, 'report_deprecated_feature')
+ def test_user_crud(self, mock_deprecator):
+ # NOTE(stevemar): As of the Mitaka release, we now check for calls that
+ # the LDAP write functionality has been deprecated.
+ user_dict = self.new_user_ref(
+ domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user_dict)
- user_dict['enabled'] = True
- user_ref = self.identity_api.get_user(user['id'])
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("create_user for the LDAP identity backend", args[1])
+
del user_dict['password']
+ user_ref = self.identity_api.get_user(user['id'])
user_ref_dict = {x: user_ref[x] for x in user_ref}
self.assertDictContainsSubset(user_dict, user_ref_dict)
user_dict['password'] = uuid.uuid4().hex
- self.identity_api.update_user(user['id'], user)
- user_ref = self.identity_api.get_user(user['id'])
+ self.identity_api.update_user(user['id'], user_dict)
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("update_user for the LDAP identity backend", args[1])
+
del user_dict['password']
+ user_ref = self.identity_api.get_user(user['id'])
user_ref_dict = {x: user_ref[x] for x in user_ref}
self.assertDictContainsSubset(user_dict, user_ref_dict)
self.identity_api.delete_user(user['id'])
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("delete_user for the LDAP identity backend", args[1])
self.assertRaises(exception.UserNotFound,
self.identity_api.get_user,
user['id'])
@@ -2192,8 +2052,8 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity):
self.load_fixtures(default_fixtures)
# Create a user and ensure they are enabled.
- user1 = {'name': u'fäké1', 'enabled': True,
- 'domain_id': CONF.identity.default_domain_id}
+ user1 = unit.new_user_ref(enabled=True,
+ domain_id=CONF.identity.default_domain_id)
user_ref = self.identity_api.create_user(user1)
self.assertIs(True, user_ref['enabled'])
@@ -2208,14 +2068,12 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity):
self.load_backends()
self.load_fixtures(default_fixtures)
- user1 = {'name': u'fäké1', 'enabled': True,
- 'domain_id': CONF.identity.default_domain_id}
+ user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
- user2 = {'name': u'fäké2', 'enabled': False,
- 'domain_id': CONF.identity.default_domain_id}
+ user2 = self.new_user_ref(enabled=False,
+ domain_id=CONF.identity.default_domain_id)
- user3 = {'name': u'fäké3',
- 'domain_id': CONF.identity.default_domain_id}
+ user3 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
# Ensure that the enabled LDAP attribute is not set for a
# newly created enabled user.
@@ -2282,121 +2140,103 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity):
user_ref = user_api.get('123456789')
self.assertIs(False, user_ref['enabled'])
+ def test_escape_member_dn(self):
+ # The enabled member DN is properly escaped when querying for enabled
+ # user.
-class LdapIdentitySqlAssignment(BaseLDAPIdentity, unit.SQLDriverOverrides,
- unit.TestCase):
+ object_id = uuid.uuid4().hex
+ driver = self.identity_api._select_identity_driver(
+ CONF.identity.default_domain_id)
- def config_files(self):
- config_files = super(LdapIdentitySqlAssignment, self).config_files()
- config_files.append(unit.dirs.tests_conf('backend_ldap_sql.conf'))
- return config_files
+ # driver.user is the EnabledEmuMixIn implementation used for this test.
+ mixin_impl = driver.user
- def setUp(self):
- sqldb = self.useFixture(database.Database())
- super(LdapIdentitySqlAssignment, self).setUp()
- self.ldapdb.clear()
- self.load_backends()
- cache.configure_cache_region(cache.REGION)
+ # ) is a special char in a filter and must be escaped.
+ sample_dn = 'cn=foo)bar'
+ # LDAP requires ) is escaped by being replaced with "\29"
+ sample_dn_filter_esc = r'cn=foo\29bar'
- sqldb.recreate()
- self.load_fixtures(default_fixtures)
- # defaulted by the data load
- self.user_foo['enabled'] = True
- _assert_backends(self,
- assignment='sql',
- identity='ldap',
- resource='sql')
+ # Override the tree_dn, it's used to build the enabled member filter
+ mixin_impl.tree_dn = sample_dn
- def config_overrides(self):
- super(LdapIdentitySqlAssignment, self).config_overrides()
- self.config_fixture.config(group='identity', driver='ldap')
- self.config_fixture.config(group='resource', driver='sql')
- self.config_fixture.config(group='assignment', driver='sql')
+ # The filter that _get_enabled is going to build contains the
+ # tree_dn, which better be escaped in this case.
+ exp_filter = '(%s=%s=%s,%s)' % (
+ mixin_impl.member_attribute, mixin_impl.id_attr, object_id,
+ sample_dn_filter_esc)
- def test_domain_crud(self):
- pass
+ with mixin_impl.get_connection() as conn:
+ m = self.useFixture(mockpatch.PatchObject(conn, 'search_s')).mock
+ mixin_impl._get_enabled(object_id, conn)
+ # The 3rd argument is the DN.
+ self.assertEqual(exp_filter, m.call_args[0][2])
- def test_list_domains(self):
- domains = self.resource_api.list_domains()
- self.assertEqual([resource.calc_default_domain()], domains)
- def test_list_domains_non_default_domain_id(self):
- # If change the default_domain_id, the ID of the default domain
- # returned by list_domains doesn't change because the SQL identity
- # backend reads it from the database, which doesn't get updated by
- # config change.
+class LDAPPosixGroupsTest(unit.TestCase):
- orig_default_domain_id = CONF.identity.default_domain_id
+ def setUp(self):
- new_domain_id = uuid.uuid4().hex
- self.config_fixture.config(group='identity',
- default_domain_id=new_domain_id)
+ super(LDAPPosixGroupsTest, self).setUp()
- domains = self.resource_api.list_domains()
+ self.useFixture(ldapdb.LDAPDatabase())
+ self.useFixture(database.Database())
- self.assertEqual(orig_default_domain_id, domains[0]['id'])
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
- def test_create_domain(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- self.assertRaises(exception.Forbidden,
- self.resource_api.create_domain,
- domain['id'],
- domain)
+ _assert_backends(self, identity='ldap')
- def test_get_and_remove_role_grant_by_group_and_domain(self):
- # TODO(henry-nash): We should really rewrite the tests in test_backend
- # to be more flexible as to where the domains are sourced from, so
- # that we would not need to override such tests here. This is raised
- # as bug 1373865.
- new_domain = self._get_domain_fixture()
- new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
- new_group = self.identity_api.create_group(new_group)
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
- 'enabled': True, 'domain_id': new_domain['id']}
- new_user = self.identity_api.create_user(new_user)
- self.identity_api.add_user_to_group(new_user['id'],
- new_group['id'])
+ def load_fixtures(self, fixtures):
+ # Override super impl since need to create group container.
+ create_group_container(self.identity_api)
+ super(LDAPPosixGroupsTest, self).load_fixtures(fixtures)
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
+ def config_overrides(self):
+ super(LDAPPosixGroupsTest, self).config_overrides()
+ self.config_fixture.config(group='identity', driver='ldap')
+ self.config_fixture.config(group='ldap', group_members_are_ids=True,
+ group_member_attribute='memberUID')
- self.assignment_api.create_grant(group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
+ def config_files(self):
+ config_files = super(LDAPPosixGroupsTest, self).config_files()
+ config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
+ return config_files
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertDictEqual(roles_ref[0], self.role_member)
+ def _get_domain_fixture(self):
+ """Domains in LDAP are read-only, so just return the static one."""
+ return self.resource_api.get_domain(CONF.identity.default_domain_id)
- self.assignment_api.delete_grant(group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
- roles_ref = self.assignment_api.list_grants(
- group_id=new_group['id'],
- domain_id=new_domain['id'])
- self.assertEqual(0, len(roles_ref))
- self.assertRaises(exception.NotFound,
- self.assignment_api.delete_grant,
- group_id=new_group['id'],
- domain_id=new_domain['id'],
- role_id='member')
+ def test_posix_member_id(self):
+ domain = self._get_domain_fixture()
+ new_group = unit.new_group_ref(domain_id=domain['id'])
+ new_group = self.identity_api.create_group(new_group)
+ # Make sure we get an empty list back on a new group, not an error.
+ user_refs = self.identity_api.list_users_in_group(new_group['id'])
+ self.assertEqual([], user_refs)
+ # Make sure we get the correct users back once they have been added
+ # to the group.
+ new_user = unit.new_user_ref(domain_id=domain['id'])
+ new_user = self.identity_api.create_user(new_user)
- def test_project_enabled_ignored_disable_error(self):
- # Override
- self.skipTest("Doesn't apply since LDAP configuration is ignored for "
- "SQL assignment backend.")
+ # NOTE(amakarov): Create the group directly using LDAP operations
+ # rather than going through the manager.
+ group_api = self.identity_api.driver.group
+ group_ref = group_api.get(new_group['id'])
+ mod = (ldap.MOD_ADD, group_api.member_attribute, new_user['id'])
+ conn = group_api.get_connection()
+ conn.modify_s(group_ref['dn'], [mod])
- def test_list_role_assignments_filtered_by_role(self):
- # Domain roles are supported by the SQL Assignment backend
- base = super(BaseLDAPIdentity, self)
- base.test_list_role_assignments_filtered_by_role()
+ # Testing the case "the group contains a user"
+ user_refs = self.identity_api.list_users_in_group(new_group['id'])
+ self.assertIn(new_user['id'], (x['id'] for x in user_refs))
+ # Testing the case "the user is a member of a group"
+ group_refs = self.identity_api.list_groups_for_user(new_user['id'])
+ self.assertIn(new_group['id'], (x['id'] for x in group_refs))
-class LdapIdentitySqlAssignmentWithMapping(LdapIdentitySqlAssignment):
+
+class LdapIdentityWithMapping(
+ BaseLDAPIdentity, unit.SQLDriverOverrides, unit.TestCase):
"""Class to test mapping of default LDAP backend.
The default configuration is not to enable mapping when using a single
@@ -2405,8 +2245,28 @@ class LdapIdentitySqlAssignmentWithMapping(LdapIdentitySqlAssignment):
Setting backward_compatible_ids to False will enable this mapping.
"""
+
+ def config_files(self):
+ config_files = super(LdapIdentityWithMapping, self).config_files()
+ config_files.append(unit.dirs.tests_conf('backend_ldap_sql.conf'))
+ return config_files
+
+ def setUp(self):
+ sqldb = self.useFixture(database.Database())
+ super(LdapIdentityWithMapping, self).setUp()
+ self.ldapdb.clear()
+ self.load_backends()
+ cache.configure_cache()
+
+ sqldb.recreate()
+ self.load_fixtures(default_fixtures)
+ # defaulted by the data load
+ self.user_foo['enabled'] = True
+ _assert_backends(self, identity='ldap')
+
def config_overrides(self):
- super(LdapIdentitySqlAssignmentWithMapping, self).config_overrides()
+ super(LdapIdentityWithMapping, self).config_overrides()
+ self.config_fixture.config(group='identity', driver='ldap')
self.config_fixture.config(group='identity_mapping',
backward_compatible_ids=False)
@@ -2420,13 +2280,9 @@ class LdapIdentitySqlAssignmentWithMapping(LdapIdentitySqlAssignment):
"""
initial_mappings = len(mapping_sql.list_id_mappings())
- user1 = {'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'password': uuid.uuid4().hex, 'enabled': True}
+ user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user1 = self.identity_api.create_user(user1)
- user2 = {'name': uuid.uuid4().hex,
- 'domain_id': CONF.identity.default_domain_id,
- 'password': uuid.uuid4().hex, 'enabled': True}
+ user2 = self.new_user_ref(domain_id=CONF.identity.default_domain_id)
user2 = self.identity_api.create_user(user2)
mappings = mapping_sql.list_id_mappings()
self.assertEqual(initial_mappings + 2, len(mappings))
@@ -2453,35 +2309,29 @@ class LdapIdentitySqlAssignmentWithMapping(LdapIdentitySqlAssignment):
self.skipTest('N/A: We never generate the same ID for a user and '
'group in our mapping table')
+ def test_list_domains(self):
+ domains = self.resource_api.list_domains()
+ self.assertEqual([resource.calc_default_domain()], domains)
+
class BaseMultiLDAPandSQLIdentity(object):
"""Mixin class with support methods for domain-specific config testing."""
- def create_user(self, domain_id):
- user = {'name': uuid.uuid4().hex,
- 'domain_id': domain_id,
- 'password': uuid.uuid4().hex,
- 'enabled': True}
- user_ref = self.identity_api.create_user(user)
- # Put the password back in, since this is used later by tests to
- # authenticate.
- user_ref['password'] = user['password']
- return user_ref
-
def create_users_across_domains(self):
"""Create a set of users, each with a role on their own domain."""
-
# We also will check that the right number of id mappings get created
initial_mappings = len(mapping_sql.list_id_mappings())
- self.users['user0'] = self.create_user(
+ self.users['user0'] = unit.create_user(
+ self.identity_api,
self.domains['domain_default']['id'])
self.assignment_api.create_grant(
user_id=self.users['user0']['id'],
domain_id=self.domains['domain_default']['id'],
role_id=self.role_member['id'])
for x in range(1, self.domain_count):
- self.users['user%s' % x] = self.create_user(
+ self.users['user%s' % x] = unit.create_user(
+ self.identity_api,
self.domains['domain%s' % x]['id'])
self.assignment_api.create_grant(
user_id=self.users['user%s' % x]['id'],
@@ -2506,13 +2356,13 @@ class BaseMultiLDAPandSQLIdentity(object):
self.identity_api._get_domain_driver_and_entity_id(
user['id']))
- if expected_status == 200:
+ if expected_status == http_client.OK:
ref = driver.get_user(entity_id)
ref = self.identity_api._set_domain_id_and_mapping(
ref, domain_id, driver, map.EntityType.USER)
user = user.copy()
del user['password']
- self.assertDictEqual(ref, user)
+ self.assertDictEqual(user, ref)
else:
# TODO(henry-nash): Use AssertRaises here, although
# there appears to be an issue with using driver.get_user
@@ -2570,6 +2420,7 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
domain.
"""
+
def setUp(self):
sqldb = self.useFixture(database.Database())
super(MultiLDAPandSQLIdentity, self).setUp()
@@ -2614,11 +2465,14 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
# Create some identity entities BEFORE we switch to multi-backend, so
# we can test that these are still accessible
self.users = {}
- self.users['userA'] = self.create_user(
+ self.users['userA'] = unit.create_user(
+ self.identity_api,
self.domains['domain_default']['id'])
- self.users['userB'] = self.create_user(
+ self.users['userB'] = unit.create_user(
+ self.identity_api,
self.domains['domain1']['id'])
- self.users['userC'] = self.create_user(
+ self.users['userC'] = unit.create_user(
+ self.identity_api,
self.domains['domain3']['id'])
def enable_multi_domain(self):
@@ -2631,7 +2485,8 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
"""
self.config_fixture.config(
group='identity', domain_specific_drivers_enabled=True,
- domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap')
+ domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap',
+ list_limit=1000)
self.config_fixture.config(group='identity_mapping',
backward_compatible_ids=False)
@@ -2640,14 +2495,6 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
# if no specific config defined for this domain
return self.identity_api.domain_configs.get_domain_conf(domain_id)
- def test_list_domains(self):
- self.skipTest(
- 'N/A: Not relevant for multi ldap testing')
-
- def test_list_domains_non_default_domain_id(self):
- self.skipTest(
- 'N/A: Not relevant for multi ldap testing')
-
def test_list_users(self):
# Override the standard list users, since we have added an extra user
# to the default domain, so the number of expected users is one more
@@ -2664,6 +2511,36 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
self.assertNotIn('password', user_ref)
self.assertEqual(expected_user_ids, user_ids)
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get_all')
+ def test_list_limit_domain_specific_inheritance(self, ldap_get_all):
+ # passing hints is important, because if it's not passed, limiting
+ # is considered to be disabled
+ hints = driver_hints.Hints()
+ self.identity_api.list_users(
+ domain_scope=self.domains['domain2']['id'],
+ hints=hints)
+ # since list_limit is not specified in keystone.domain2.conf, it should
+ # take the default, which is 1000
+ self.assertTrue(ldap_get_all.called)
+ args, kwargs = ldap_get_all.call_args
+ hints = args[0]
+ self.assertEqual(1000, hints.limit['limit'])
+
+ @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get_all')
+ def test_list_limit_domain_specific_override(self, ldap_get_all):
+ # passing hints is important, because if it's not passed, limiting
+ # is considered to be disabled
+ hints = driver_hints.Hints()
+ self.identity_api.list_users(
+ domain_scope=self.domains['domain1']['id'],
+ hints=hints)
+ # this should have the list_limit set in keystone.domain1.conf, which
+ # is 101
+ self.assertTrue(ldap_get_all.called)
+ args, kwargs = ldap_get_all.call_args
+ hints = args[0]
+ self.assertEqual(101, hints.limit['limit'])
+
def test_domain_segregation(self):
"""Test that separate configs have segregated the domain.
@@ -2680,21 +2557,23 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
check_user = self.check_user
check_user(self.users['user0'],
- self.domains['domain_default']['id'], 200)
+ self.domains['domain_default']['id'], http_client.OK)
for domain in [self.domains['domain1']['id'],
self.domains['domain2']['id'],
self.domains['domain3']['id'],
self.domains['domain4']['id']]:
check_user(self.users['user0'], domain, exception.UserNotFound)
- check_user(self.users['user1'], self.domains['domain1']['id'], 200)
+ check_user(self.users['user1'], self.domains['domain1']['id'],
+ http_client.OK)
for domain in [self.domains['domain_default']['id'],
self.domains['domain2']['id'],
self.domains['domain3']['id'],
self.domains['domain4']['id']]:
check_user(self.users['user1'], domain, exception.UserNotFound)
- check_user(self.users['user2'], self.domains['domain2']['id'], 200)
+ check_user(self.users['user2'], self.domains['domain2']['id'],
+ http_client.OK)
for domain in [self.domains['domain_default']['id'],
self.domains['domain1']['id'],
self.domains['domain3']['id'],
@@ -2704,10 +2583,14 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
# domain3 and domain4 share the same backend, so you should be
# able to see user3 and user4 from either.
- check_user(self.users['user3'], self.domains['domain3']['id'], 200)
- check_user(self.users['user3'], self.domains['domain4']['id'], 200)
- check_user(self.users['user4'], self.domains['domain3']['id'], 200)
- check_user(self.users['user4'], self.domains['domain4']['id'], 200)
+ check_user(self.users['user3'], self.domains['domain3']['id'],
+ http_client.OK)
+ check_user(self.users['user3'], self.domains['domain4']['id'],
+ http_client.OK)
+ check_user(self.users['user4'], self.domains['domain3']['id'],
+ http_client.OK)
+ check_user(self.users['user4'], self.domains['domain4']['id'],
+ http_client.OK)
for domain in [self.domains['domain_default']['id'],
self.domains['domain1']['id'],
@@ -2789,19 +2672,12 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
self.assertEqual('fake://memory1', conf.ldap.url)
def test_delete_domain_with_user_added(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
- project = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'domain_id': domain['id'],
- 'description': uuid.uuid4().hex,
- 'parent_id': None,
- 'enabled': True,
- 'is_domain': False}
+ domain = unit.new_domain_ref()
+ project = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_domain(domain['id'], domain)
- self.resource_api.create_project(project['id'], project)
+ project = self.resource_api.create_project(project['id'], project)
project_ref = self.resource_api.get_project(project['id'])
- self.assertDictEqual(project_ref, project)
+ self.assertDictEqual(project, project_ref)
self.assignment_api.create_grant(user_id=self.user_foo['id'],
project_id=project['id'],
@@ -2839,13 +2715,37 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides,
def test_list_role_assignment_by_domain(self):
# With multi LDAP this method should work, so override the override
# from BaseLDAPIdentity
- super(BaseLDAPIdentity, self).test_list_role_assignment_by_domain
+ super(BaseLDAPIdentity, self).test_list_role_assignment_by_domain()
def test_list_role_assignment_by_user_with_domain_group_roles(self):
# With multi LDAP this method should work, so override the override
# from BaseLDAPIdentity
super(BaseLDAPIdentity, self).\
- test_list_role_assignment_by_user_with_domain_group_roles
+ test_list_role_assignment_by_user_with_domain_group_roles()
+
+ def test_list_role_assignment_using_sourced_groups_with_domains(self):
+ # With SQL Assignment this method should work, so override the override
+ # from BaseLDAPIdentity
+ base = super(BaseLDAPIdentity, self)
+ base.test_list_role_assignment_using_sourced_groups_with_domains()
+
+ def test_create_project_with_domain_id_and_without_parent_id(self):
+ # With multi LDAP this method should work, so override the override
+ # from BaseLDAPIdentity
+ super(BaseLDAPIdentity, self).\
+ test_create_project_with_domain_id_and_without_parent_id()
+
+ def test_create_project_with_domain_id_mismatch_to_parent_domain(self):
+ # With multi LDAP this method should work, so override the override
+ # from BaseLDAPIdentity
+ super(BaseLDAPIdentity, self).\
+ test_create_project_with_domain_id_mismatch_to_parent_domain()
+
+ def test_remove_foreign_assignments_when_deleting_a_domain(self):
+ # With multi LDAP this method should work, so override the override
+ # from BaseLDAPIdentity
+ base = super(BaseLDAPIdentity, self)
+ base.test_remove_foreign_assignments_when_deleting_a_domain()
class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
@@ -2870,7 +2770,7 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
def enable_multi_domain(self):
# The values below are the same as in the domain_configs_multi_ldap
- # cdirectory of test config_files.
+ # directory of test config_files.
default_config = {
'ldap': {'url': 'fake://memory',
'user': 'cn=Admin',
@@ -2883,7 +2783,8 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'ldap'}
+ 'identity': {'driver': 'ldap',
+ 'list_limit': '101'}
}
domain2_config = {
'ldap': {'url': 'fake://memory',
@@ -2904,7 +2805,8 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
self.config_fixture.config(
group='identity', domain_specific_drivers_enabled=True,
- domain_configurations_from_database=True)
+ domain_configurations_from_database=True,
+ list_limit=1000)
self.config_fixture.config(group='identity_mapping',
backward_compatible_ids=False)
@@ -2933,7 +2835,6 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
def test_reloading_domain_config(self):
"""Ensure domain drivers are reloaded on a config modification."""
-
domain_cfgs = self.identity_api.domain_configs
# Create a new config for the default domain, hence overwriting the
@@ -2965,7 +2866,6 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
def test_setting_multiple_sql_driver_raises_exception(self):
"""Ensure setting multiple domain specific sql drivers is prevented."""
-
new_config = {'identity': {'driver': 'sql'}}
self.domain_config_api.create_config(
CONF.identity.default_domain_id, new_config)
@@ -2979,7 +2879,6 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
def test_same_domain_gets_sql_driver(self):
"""Ensure we can set an SQL driver if we have had it before."""
-
new_config = {'identity': {'driver': 'sql'}}
self.domain_config_api.create_config(
CONF.identity.default_domain_id, new_config)
@@ -2997,8 +2896,7 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
def test_delete_domain_clears_sql_registration(self):
"""Ensure registration is deleted when a domain is deleted."""
-
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain = unit.new_domain_ref()
domain = self.resource_api.create_domain(domain['id'], domain)
new_config = {'identity': {'driver': 'sql'}}
self.domain_config_api.create_config(domain['id'], new_config)
@@ -3025,8 +2923,7 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
def test_orphaned_registration_does_not_prevent_getting_sql_driver(self):
"""Ensure we self heal an orphaned sql registration."""
-
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain = unit.new_domain_ref()
domain = self.resource_api.create_domain(domain['id'], domain)
new_config = {'identity': {'driver': 'sql'}}
self.domain_config_api.create_config(domain['id'], new_config)
@@ -3047,7 +2944,7 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity):
# should still be able to set another domain to SQL, since we should
# self heal this issue.
- self.resource_api.driver.delete_domain(domain['id'])
+ self.resource_api.driver.delete_project(domain['id'])
# Invalidate cache (so we will see the domain has gone)
self.resource_api.get_domain.invalidate(
self.resource_api, domain['id'])
@@ -3072,6 +2969,7 @@ class DomainSpecificLDAPandSQLIdentity(
Although the default driver still exists, we don't use it.
"""
+
def setUp(self):
sqldb = self.useFixture(database.Database())
super(DomainSpecificLDAPandSQLIdentity, self).setUp()
@@ -3133,6 +3031,17 @@ class DomainSpecificLDAPandSQLIdentity(
self.skipTest(
'N/A: Not relevant for multi ldap testing')
+ def test_not_delete_domain_with_enabled_subdomains(self):
+ self.skipTest(
+ 'N/A: Not relevant for multi ldap testing')
+
+ def test_delete_domain(self):
+ # With this restricted multi LDAP class, tests that use multiple
+ # domains and identity, are still not supported
+ self.assertRaises(
+ exception.DomainNotFound,
+ super(BaseLDAPIdentity, self).test_delete_domain_with_project_api)
+
def test_list_users(self):
# Override the standard list users, since we have added an extra user
# to the default domain, so the number of expected users is one more
@@ -3164,12 +3073,12 @@ class DomainSpecificLDAPandSQLIdentity(
# driver, but won't find it via any other domain driver
self.check_user(self.users['user0'],
- self.domains['domain_default']['id'], 200)
+ self.domains['domain_default']['id'], http_client.OK)
self.check_user(self.users['user0'],
self.domains['domain1']['id'], exception.UserNotFound)
self.check_user(self.users['user1'],
- self.domains['domain1']['id'], 200)
+ self.domains['domain1']['id'], http_client.OK)
self.check_user(self.users['user1'],
self.domains['domain_default']['id'],
exception.UserNotFound)
@@ -3182,10 +3091,10 @@ class DomainSpecificLDAPandSQLIdentity(
domain_scope=self.domains['domain1']['id']),
matchers.HasLength(1))
- def test_add_role_grant_to_user_and_project_404(self):
+ def test_add_role_grant_to_user_and_project_returns_not_found(self):
self.skipTest('Blocked by bug 1101287')
- def test_get_role_grants_for_user_and_project_404(self):
+ def test_get_role_grants_for_user_and_project_returns_not_found(self):
self.skipTest('Blocked by bug 1101287')
def test_list_projects_for_user_with_grants(self):
@@ -3223,6 +3132,25 @@ class DomainSpecificLDAPandSQLIdentity(
base = super(BaseLDAPIdentity, self)
base.test_list_role_assignments_filtered_by_role()
+ def test_delete_domain_with_project_api(self):
+ # With this restricted multi LDAP class, tests that use multiple
+ # domains and identity, are still not supported
+ self.assertRaises(
+ exception.DomainNotFound,
+ super(BaseLDAPIdentity, self).test_delete_domain_with_project_api)
+
+ def test_create_project_with_domain_id_and_without_parent_id(self):
+ # With restricted multi LDAP, tests that don't use identity, but do
+ # require additional domains will work
+ base = super(BaseLDAPIdentity, self)
+ base.test_create_project_with_domain_id_and_without_parent_id()
+
+ def test_create_project_with_domain_id_mismatch_to_parent_domain(self):
+ # With restricted multi LDAP, tests that don't use identity, but do
+ # require additional domains will work
+ base = super(BaseLDAPIdentity, self)
+ base.test_create_project_with_domain_id_mismatch_to_parent_domain()
+
class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
"""Class to test simplest use of domain-specific SQL driver.
@@ -3236,6 +3164,7 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
- A separate SQL backend for domain1
"""
+
def initial_setup(self, sqldb):
# We aren't setting up any initial data ahead of switching to
# domain-specific operation, so make the switch straight away.
@@ -3323,7 +3252,7 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
'domain2')
-class LdapFilterTests(test_backend.FilterTests, unit.TestCase):
+class LdapFilterTests(identity_tests.FilterTests, unit.TestCase):
def setUp(self):
super(LdapFilterTests, self).setUp()
@@ -3333,7 +3262,7 @@ class LdapFilterTests(test_backend.FilterTests, unit.TestCase):
self.load_backends()
self.load_fixtures(default_fixtures)
sqldb.recreate()
- _assert_backends(self, assignment='ldap', identity='ldap')
+ _assert_backends(self, identity='ldap')
def config_overrides(self):
super(LdapFilterTests, self).config_overrides()
@@ -3344,13 +3273,15 @@ class LdapFilterTests(test_backend.FilterTests, unit.TestCase):
config_files.append(unit.dirs.tests_conf('backend_ldap.conf'))
return config_files
- def test_list_users_in_group_filtered(self):
+ @wip('Not supported by LDAP identity driver')
+ def test_list_users_in_group_inexact_filtered(self):
+ # The LDAP identity driver currently does not support filtering on the
+ # listing users for a given group, so will fail this test.
+ super(LdapFilterTests,
+ self).test_list_users_in_group_inexact_filtered()
+
+ @wip('Not supported by LDAP identity driver')
+ def test_list_users_in_group_exact_filtered(self):
# The LDAP identity driver currently does not support filtering on the
# listing users for a given group, so will fail this test.
- try:
- super(LdapFilterTests, self).test_list_users_in_group_filtered()
- except matchers.MismatchError:
- return
- # We shouldn't get here...if we do, it means someone has implemented
- # filtering, so we can remove this test override.
- self.assertTrue(False)
+ super(LdapFilterTests, self).test_list_users_in_group_exact_filtered()
diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py b/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
index 2b714b57..ec789d04 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py
@@ -38,7 +38,7 @@ class LdapPoolCommonTestMixin(object):
# by default use_pool and use_auth_pool is enabled in test pool config
user_ref = self.identity_api.get_user(self.user_foo['id'])
self.user_foo.pop('password')
- self.assertDictEqual(user_ref, self.user_foo)
+ self.assertDictEqual(self.user_foo, user_ref)
handler = ldap_core._get_connection(CONF.ldap.url, use_pool=True)
self.assertIsInstance(handler, ldap_core.PooledLDAPHandler)
@@ -151,22 +151,22 @@ class LdapPoolCommonTestMixin(object):
# Open 3 connections first
with _get_conn() as _: # conn1
- self.assertEqual(len(ldappool_cm), 1)
+ self.assertEqual(1, len(ldappool_cm))
with _get_conn() as _: # conn2
- self.assertEqual(len(ldappool_cm), 2)
+ self.assertEqual(2, len(ldappool_cm))
with _get_conn() as _: # conn2
_.unbind_ext_s()
- self.assertEqual(len(ldappool_cm), 3)
+ self.assertEqual(3, len(ldappool_cm))
# Then open 3 connections again and make sure size does not grow
# over 3
with _get_conn() as _: # conn1
- self.assertEqual(len(ldappool_cm), 1)
+ self.assertEqual(1, len(ldappool_cm))
with _get_conn() as _: # conn2
- self.assertEqual(len(ldappool_cm), 2)
+ self.assertEqual(2, len(ldappool_cm))
with _get_conn() as _: # conn3
_.unbind_ext_s()
- self.assertEqual(len(ldappool_cm), 3)
+ self.assertEqual(3, len(ldappool_cm))
def test_password_change_with_pool(self):
old_password = self.user_sna['password']
@@ -181,14 +181,14 @@ class LdapPoolCommonTestMixin(object):
self.user_sna.pop('password')
self.user_sna['enabled'] = True
- self.assertDictEqual(user_ref, self.user_sna)
+ self.assertDictEqual(self.user_sna, user_ref)
new_password = 'new_password'
user_ref['password'] = new_password
self.identity_api.update_user(user_ref['id'], user_ref)
# now authenticate again to make sure new password works with
- # conneciton pool
+ # connection pool
user_ref2 = self.identity_api.authenticate(
context={},
user_id=self.user_sna['id'],
@@ -207,14 +207,15 @@ class LdapPoolCommonTestMixin(object):
password=old_password)
-class LdapIdentitySqlAssignment(LdapPoolCommonTestMixin,
- test_backend_ldap.LdapIdentitySqlAssignment,
- unit.TestCase):
+class LDAPIdentity(LdapPoolCommonTestMixin,
+ test_backend_ldap.LDAPIdentity,
+ unit.TestCase):
"""Executes tests in existing base class with pooled LDAP handler."""
+
def setUp(self):
self.useFixture(mockpatch.PatchObject(
ldap_core.PooledLDAPHandler, 'Connector', fakeldap.FakeLdapPool))
- super(LdapIdentitySqlAssignment, self).setUp()
+ super(LDAPIdentity, self).setUp()
self.addCleanup(self.cleanup_pools)
# storing to local variable to avoid long references
@@ -225,7 +226,7 @@ class LdapIdentitySqlAssignment(LdapPoolCommonTestMixin,
self.identity_api.get_user(self.user_foo['id'])
def config_files(self):
- config_files = super(LdapIdentitySqlAssignment, self).config_files()
+ config_files = super(LDAPIdentity, self).config_files()
config_files.append(unit.dirs.tests_conf('backend_ldap_pool.conf'))
return config_files
diff --git a/keystone-moon/keystone/tests/unit/test_backend_rules.py b/keystone-moon/keystone/tests/unit/test_backend_rules.py
index 9a11fddc..c32c3307 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_rules.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_rules.py
@@ -15,10 +15,10 @@
from keystone import exception
from keystone.tests import unit
-from keystone.tests.unit import test_backend
+from keystone.tests.unit.policy import test_backends as policy_tests
-class RulesPolicy(unit.TestCase, test_backend.PolicyTests):
+class RulesPolicy(unit.TestCase, policy_tests.PolicyTests):
def setUp(self):
super(RulesPolicy, self).setUp()
self.load_backends()
@@ -47,14 +47,17 @@ class RulesPolicy(unit.TestCase, test_backend.PolicyTests):
self.assertRaises(exception.NotImplemented,
super(RulesPolicy, self).test_delete)
- def test_get_policy_404(self):
+ def test_get_policy_returns_not_found(self):
self.assertRaises(exception.NotImplemented,
- super(RulesPolicy, self).test_get_policy_404)
+ super(RulesPolicy,
+ self).test_get_policy_returns_not_found)
- def test_update_policy_404(self):
+ def test_update_policy_returns_not_found(self):
self.assertRaises(exception.NotImplemented,
- super(RulesPolicy, self).test_update_policy_404)
+ super(RulesPolicy,
+ self).test_update_policy_returns_not_found)
- def test_delete_policy_404(self):
+ def test_delete_policy_returns_not_found(self):
self.assertRaises(exception.NotImplemented,
- super(RulesPolicy, self).test_delete_policy_404)
+ super(RulesPolicy,
+ self).test_delete_policy_returns_not_found)
diff --git a/keystone-moon/keystone/tests/unit/test_backend_sql.py b/keystone-moon/keystone/tests/unit/test_backend_sql.py
index 69fac63a..2e703fff 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_sql.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_sql.py
@@ -29,22 +29,28 @@ from keystone.common import driver_hints
from keystone.common import sql
from keystone import exception
from keystone.identity.backends import sql as identity_sql
+from keystone import resource
from keystone.tests import unit
+from keystone.tests.unit.assignment import test_backends as assignment_tests
+from keystone.tests.unit.catalog import test_backends as catalog_tests
from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.identity import test_backends as identity_tests
from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit import test_backend
+from keystone.tests.unit.policy import test_backends as policy_tests
+from keystone.tests.unit.resource import test_backends as resource_tests
+from keystone.tests.unit.token import test_backends as token_tests
+from keystone.tests.unit.trust import test_backends as trust_tests
from keystone.token.persistence.backends import sql as token_sql
CONF = cfg.CONF
-DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
class SqlTests(unit.SQLDriverOverrides, unit.TestCase):
def setUp(self):
super(SqlTests, self).setUp()
- self.useFixture(database.Database())
+ self.useFixture(database.Database(self.sql_driver_version_overrides))
self.load_backends()
# populate the engine with tables & fixtures
@@ -124,14 +130,33 @@ class SqlModels(SqlTests):
def test_user_model(self):
cols = (('id', sql.String, 64),
- ('name', sql.String, 255),
- ('password', sql.String, 128),
- ('domain_id', sql.String, 64),
('default_project_id', sql.String, 64),
('enabled', sql.Boolean, None),
('extra', sql.JsonBlob, None))
self.assertExpectedSchema('user', cols)
+ def test_local_user_model(self):
+ cols = (('id', sql.Integer, None),
+ ('user_id', sql.String, 64),
+ ('name', sql.String, 255),
+ ('domain_id', sql.String, 64))
+ self.assertExpectedSchema('local_user', cols)
+
+ def test_password_model(self):
+ cols = (('id', sql.Integer, None),
+ ('local_user_id', sql.Integer, None),
+ ('password', sql.String, 128))
+ self.assertExpectedSchema('password', cols)
+
+ def test_federated_user_model(self):
+ cols = (('id', sql.Integer, None),
+ ('user_id', sql.String, 64),
+ ('idp_id', sql.String, 64),
+ ('protocol_id', sql.String, 64),
+ ('unique_id', sql.String, 255),
+ ('display_name', sql.String, 255))
+ self.assertExpectedSchema('federated_user', cols)
+
def test_group_model(self):
cols = (('id', sql.String, 64),
('name', sql.String, 64),
@@ -171,17 +196,58 @@ class SqlModels(SqlTests):
('user_id', sql.String, 64))
self.assertExpectedSchema('user_group_membership', cols)
-
-class SqlIdentity(SqlTests, test_backend.IdentityTests):
+ def test_revocation_event_model(self):
+ cols = (('id', sql.Integer, None),
+ ('domain_id', sql.String, 64),
+ ('project_id', sql.String, 64),
+ ('user_id', sql.String, 64),
+ ('role_id', sql.String, 64),
+ ('trust_id', sql.String, 64),
+ ('consumer_id', sql.String, 64),
+ ('access_token_id', sql.String, 64),
+ ('issued_before', sql.DateTime, None),
+ ('expires_at', sql.DateTime, None),
+ ('revoked_at', sql.DateTime, None),
+ ('audit_id', sql.String, 32),
+ ('audit_chain_id', sql.String, 32))
+ self.assertExpectedSchema('revocation_event', cols)
+
+
+class SqlIdentity(SqlTests, identity_tests.IdentityTests,
+ assignment_tests.AssignmentTests,
+ resource_tests.ResourceTests):
def test_password_hashed(self):
- session = sql.get_session()
- user_ref = self.identity_api._get_user(session, self.user_foo['id'])
- self.assertNotEqual(user_ref['password'], self.user_foo['password'])
+ with sql.session_for_read() as session:
+ user_ref = self.identity_api._get_user(session,
+ self.user_foo['id'])
+ self.assertNotEqual(self.user_foo['password'],
+ user_ref['password'])
+
+ def test_create_user_with_null_password(self):
+ user_dict = unit.new_user_ref(
+ domain_id=CONF.identity.default_domain_id)
+ user_dict["password"] = None
+ new_user_dict = self.identity_api.create_user(user_dict)
+ with sql.session_for_read() as session:
+ new_user_ref = self.identity_api._get_user(session,
+ new_user_dict['id'])
+ self.assertFalse(new_user_ref.local_user.passwords)
+
+ def test_update_user_with_null_password(self):
+ user_dict = unit.new_user_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.assertTrue(user_dict['password'])
+ new_user_dict = self.identity_api.create_user(user_dict)
+ new_user_dict["password"] = None
+ new_user_dict = self.identity_api.update_user(new_user_dict['id'],
+ new_user_dict)
+ with sql.session_for_read() as session:
+ new_user_ref = self.identity_api._get_user(session,
+ new_user_dict['id'])
+ self.assertFalse(new_user_ref.local_user.passwords)
def test_delete_user_with_project_association(self):
- user = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex}
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user)
self.assignment_api.add_user_to_project(self.tenant_bar['id'],
user['id'])
@@ -191,16 +257,15 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
user['id'])
def test_create_null_user_name(self):
- user = {'name': None,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex}
+ user = unit.new_user_ref(name=None,
+ domain_id=CONF.identity.default_domain_id)
self.assertRaises(exception.ValidationError,
self.identity_api.create_user,
user)
self.assertRaises(exception.UserNotFound,
self.identity_api.get_user_by_name,
user['name'],
- DEFAULT_DOMAIN_ID)
+ CONF.identity.default_domain_id)
def test_create_user_case_sensitivity(self):
# user name case sensitivity is down to the fact that it is marked as
@@ -208,25 +273,59 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
# LDAP.
# create a ref with a lowercase name
- ref = {
- 'name': uuid.uuid4().hex.lower(),
- 'domain_id': DEFAULT_DOMAIN_ID}
+ ref = unit.new_user_ref(name=uuid.uuid4().hex.lower(),
+ domain_id=CONF.identity.default_domain_id)
ref = self.identity_api.create_user(ref)
# assign a new ID with the same name, but this time in uppercase
ref['name'] = ref['name'].upper()
self.identity_api.create_user(ref)
+ def test_create_federated_user_unique_constraint(self):
+ federated_dict = unit.new_federated_user_ref()
+ user_dict = self.shadow_users_api.create_federated_user(federated_dict)
+ user_dict = self.identity_api.get_user(user_dict["id"])
+ self.assertIsNotNone(user_dict["id"])
+ self.assertRaises(exception.Conflict,
+ self.shadow_users_api.create_federated_user,
+ federated_dict)
+
+ def test_get_federated_user(self):
+ federated_dict = unit.new_federated_user_ref()
+ user_dict_create = self.shadow_users_api.create_federated_user(
+ federated_dict)
+ user_dict_get = self.shadow_users_api.get_federated_user(
+ federated_dict["idp_id"],
+ federated_dict["protocol_id"],
+ federated_dict["unique_id"])
+ self.assertItemsEqual(user_dict_create, user_dict_get)
+ self.assertEqual(user_dict_create["id"], user_dict_get["id"])
+
+ def test_update_federated_user_display_name(self):
+ federated_dict = unit.new_federated_user_ref()
+ user_dict_create = self.shadow_users_api.create_federated_user(
+ federated_dict)
+ new_display_name = uuid.uuid4().hex
+ self.shadow_users_api.update_federated_user_display_name(
+ federated_dict["idp_id"],
+ federated_dict["protocol_id"],
+ federated_dict["unique_id"],
+ new_display_name)
+ user_ref = self.shadow_users_api._get_federated_user(
+ federated_dict["idp_id"],
+ federated_dict["protocol_id"],
+ federated_dict["unique_id"])
+ self.assertEqual(user_ref.federated_users[0].display_name,
+ new_display_name)
+ self.assertEqual(user_dict_create["id"], user_ref.id)
+
def test_create_project_case_sensitivity(self):
# project name case sensitivity is down to the fact that it is marked
# as an SQL UNIQUE column, which may not be valid for other backends,
# like LDAP.
# create a ref with a lowercase name
- ref = {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex.lower(),
- 'domain_id': DEFAULT_DOMAIN_ID}
+ ref = unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
self.resource_api.create_project(ref['id'], ref)
# assign a new ID with the same name, but this time in uppercase
@@ -235,25 +334,22 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
self.resource_api.create_project(ref['id'], ref)
def test_create_null_project_name(self):
- tenant = {'id': uuid.uuid4().hex,
- 'name': None,
- 'domain_id': DEFAULT_DOMAIN_ID}
+ project = unit.new_project_ref(
+ name=None, domain_id=CONF.identity.default_domain_id)
self.assertRaises(exception.ValidationError,
self.resource_api.create_project,
- tenant['id'],
- tenant)
+ project['id'],
+ project)
self.assertRaises(exception.ProjectNotFound,
self.resource_api.get_project,
- tenant['id'])
+ project['id'])
self.assertRaises(exception.ProjectNotFound,
self.resource_api.get_project_by_name,
- tenant['name'],
- DEFAULT_DOMAIN_ID)
+ project['name'],
+ CONF.identity.default_domain_id)
def test_delete_project_with_user_association(self):
- user = {'name': 'fakeuser',
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'passwd'}
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user)
self.assignment_api.add_user_to_project(self.tenant_bar['id'],
user['id'])
@@ -261,52 +357,6 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
tenants = self.assignment_api.list_projects_for_user(user['id'])
self.assertEqual([], tenants)
- def test_metadata_removed_on_delete_user(self):
- # A test to check that the internal representation
- # or roles is correctly updated when a user is deleted
- user = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'passwd'}
- user = self.identity_api.create_user(user)
- role = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.role_api.create_role(role['id'], role)
- self.assignment_api.add_role_to_user_and_project(
- user['id'],
- self.tenant_bar['id'],
- role['id'])
- self.identity_api.delete_user(user['id'])
-
- # Now check whether the internal representation of roles
- # has been deleted
- self.assertRaises(exception.MetadataNotFound,
- self.assignment_api._get_metadata,
- user['id'],
- self.tenant_bar['id'])
-
- def test_metadata_removed_on_delete_project(self):
- # A test to check that the internal representation
- # or roles is correctly updated when a project is deleted
- user = {'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': 'passwd'}
- user = self.identity_api.create_user(user)
- role = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
- self.role_api.create_role(role['id'], role)
- self.assignment_api.add_role_to_user_and_project(
- user['id'],
- self.tenant_bar['id'],
- role['id'])
- self.resource_api.delete_project(self.tenant_bar['id'])
-
- # Now check whether the internal representation of roles
- # has been deleted
- self.assertRaises(exception.MetadataNotFound,
- self.assignment_api._get_metadata,
- user['id'],
- self.tenant_bar['id'])
-
def test_update_project_returns_extra(self):
"""This tests for backwards-compatibility with an essex/folsom bug.
@@ -317,20 +367,17 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
This behavior is specific to the SQL driver.
"""
- tenant_id = uuid.uuid4().hex
arbitrary_key = uuid.uuid4().hex
arbitrary_value = uuid.uuid4().hex
- tenant = {
- 'id': tenant_id,
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- arbitrary_key: arbitrary_value}
- ref = self.resource_api.create_project(tenant_id, tenant)
+ project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ project[arbitrary_key] = arbitrary_value
+ ref = self.resource_api.create_project(project['id'], project)
self.assertEqual(arbitrary_value, ref[arbitrary_key])
self.assertIsNone(ref.get('extra'))
- tenant['name'] = uuid.uuid4().hex
- ref = self.resource_api.update_project(tenant_id, tenant)
+ ref['name'] = uuid.uuid4().hex
+ ref = self.resource_api.update_project(ref['id'], ref)
self.assertEqual(arbitrary_value, ref[arbitrary_key])
self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])
@@ -346,11 +393,9 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
"""
arbitrary_key = uuid.uuid4().hex
arbitrary_value = uuid.uuid4().hex
- user = {
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex,
- arbitrary_key: arbitrary_value}
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
+ user[arbitrary_key] = arbitrary_value
+ del user["id"]
ref = self.identity_api.create_user(user)
self.assertEqual(arbitrary_value, ref[arbitrary_key])
self.assertIsNone(ref.get('password'))
@@ -365,30 +410,25 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key])
def test_sql_user_to_dict_null_default_project_id(self):
- user = {
- 'name': uuid.uuid4().hex,
- 'domain_id': DEFAULT_DOMAIN_ID,
- 'password': uuid.uuid4().hex}
-
+ user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id)
user = self.identity_api.create_user(user)
- session = sql.get_session()
- query = session.query(identity_sql.User)
- query = query.filter_by(id=user['id'])
- raw_user_ref = query.one()
- self.assertIsNone(raw_user_ref.default_project_id)
- user_ref = raw_user_ref.to_dict()
- self.assertNotIn('default_project_id', user_ref)
- session.close()
+ with sql.session_for_read() as session:
+ query = session.query(identity_sql.User)
+ query = query.filter_by(id=user['id'])
+ raw_user_ref = query.one()
+ self.assertIsNone(raw_user_ref.default_project_id)
+ user_ref = raw_user_ref.to_dict()
+ self.assertNotIn('default_project_id', user_ref)
+ session.close()
def test_list_domains_for_user(self):
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
- user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': domain['id'], 'enabled': True}
+ user = unit.new_user_ref(domain_id=domain['id'])
- test_domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ test_domain1 = unit.new_domain_ref()
self.resource_api.create_domain(test_domain1['id'], test_domain1)
- test_domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ test_domain2 = unit.new_domain_ref()
self.resource_api.create_domain(test_domain2['id'], test_domain2)
user = self.identity_api.create_user(user)
@@ -407,21 +447,20 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
# Create two groups each with a role on a different domain, and
# make user1 a member of both groups. Both these new domains
# should now be included, along with any direct user grants.
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
- user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': domain['id'], 'enabled': True}
+ user = unit.new_user_ref(domain_id=domain['id'])
user = self.identity_api.create_user(user)
- group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = self.identity_api.create_group(group1)
- group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
+ group2 = unit.new_group_ref(domain_id=domain['id'])
group2 = self.identity_api.create_group(group2)
- test_domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ test_domain1 = unit.new_domain_ref()
self.resource_api.create_domain(test_domain1['id'], test_domain1)
- test_domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ test_domain2 = unit.new_domain_ref()
self.resource_api.create_domain(test_domain2['id'], test_domain2)
- test_domain3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ test_domain3 = unit.new_domain_ref()
self.resource_api.create_domain(test_domain3['id'], test_domain3)
self.identity_api.add_user_to_group(user['id'], group1['id'])
@@ -451,17 +490,16 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
- When listing domains for user, neither domain should be returned
"""
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain1 = unit.new_domain_ref()
domain1 = self.resource_api.create_domain(domain1['id'], domain1)
- domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain2 = unit.new_domain_ref()
domain2 = self.resource_api.create_domain(domain2['id'], domain2)
- user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
- 'domain_id': domain1['id'], 'enabled': True}
+ user = unit.new_user_ref(domain_id=domain1['id'])
user = self.identity_api.create_user(user)
- group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']}
+ group = unit.new_group_ref(domain_id=domain1['id'])
group = self.identity_api.create_group(group)
self.identity_api.add_user_to_group(user['id'], group['id'])
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
# Create a grant on each domain, one user grant, one group grant,
@@ -480,25 +518,143 @@ class SqlIdentity(SqlTests, test_backend.IdentityTests):
# roles assignments.
self.assertThat(user_domains, matchers.HasLength(0))
+ def test_storing_null_domain_id_in_project_ref(self):
+ """Test the special storage of domain_id=None in sql resource driver.
+
+ The resource driver uses a special value in place of None for domain_id
+ in the project record. This shouldn't escape the driver. Hence we test
+ the interface to ensure that you can store a domain_id of None, and
+ that any special value used inside the driver does not escape through
+ the interface.
+
+ """
+ spoiler_project = unit.new_project_ref(
+ domain_id=CONF.identity.default_domain_id)
+ self.resource_api.create_project(spoiler_project['id'],
+ spoiler_project)
+
+ # First let's create a project with a None domain_id and make sure we
+ # can read it back.
+ project = unit.new_project_ref(domain_id=None, is_domain=True)
+ project = self.resource_api.create_project(project['id'], project)
+ ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project, ref)
+
+ # Can we get it by name?
+ ref = self.resource_api.get_project_by_name(project['name'], None)
+ self.assertDictEqual(project, ref)
+
+ # Can we filter for them - create a second domain to ensure we are
+ # testing the receipt of more than one.
+ project2 = unit.new_project_ref(domain_id=None, is_domain=True)
+ project2 = self.resource_api.create_project(project2['id'], project2)
+ hints = driver_hints.Hints()
+ hints.add_filter('domain_id', None)
+ refs = self.resource_api.list_projects(hints)
+ self.assertThat(refs, matchers.HasLength(2 + self.domain_count))
+ self.assertIn(project, refs)
+ self.assertIn(project2, refs)
+
+ # Can we update it?
+ project['name'] = uuid.uuid4().hex
+ self.resource_api.update_project(project['id'], project)
+ ref = self.resource_api.get_project(project['id'])
+ self.assertDictEqual(project, ref)
+
+ # Finally, make sure we can delete it
+ project['enabled'] = False
+ self.resource_api.update_project(project['id'], project)
+ self.resource_api.delete_project(project['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project['id'])
+
+ def test_hidden_project_domain_root_is_really_hidden(self):
+ """Ensure we cannot access the hidden root of all project domains.
+
+ Calling any of the driver methods should result in the same as
+ would be returned if we passed a project that does not exist. We don't
+ test create_project, since we do not allow a caller of our API to
+ specify their own ID for a new entity.
+
+ """
+ def _exercise_project_api(ref_id):
+ driver = self.resource_api.driver
+ self.assertRaises(exception.ProjectNotFound,
+ driver.get_project,
+ ref_id)
+
+ self.assertRaises(exception.ProjectNotFound,
+ driver.get_project_by_name,
+ resource.NULL_DOMAIN_ID,
+ ref_id)
+
+ project_ids = [x['id'] for x in
+ driver.list_projects(driver_hints.Hints())]
+ self.assertNotIn(ref_id, project_ids)
+
+ projects = driver.list_projects_from_ids([ref_id])
+ self.assertThat(projects, matchers.HasLength(0))
-class SqlTrust(SqlTests, test_backend.TrustTests):
+ project_ids = [x for x in
+ driver.list_project_ids_from_domain_ids([ref_id])]
+ self.assertNotIn(ref_id, project_ids)
+
+ self.assertRaises(exception.DomainNotFound,
+ driver.list_projects_in_domain,
+ ref_id)
+
+ project_ids = [
+ x['id'] for x in
+ driver.list_projects_acting_as_domain(driver_hints.Hints())]
+ self.assertNotIn(ref_id, project_ids)
+
+ projects = driver.list_projects_in_subtree(ref_id)
+ self.assertThat(projects, matchers.HasLength(0))
+
+ self.assertRaises(exception.ProjectNotFound,
+ driver.list_project_parents,
+ ref_id)
+
+ # A non-existing project just returns True from the driver
+ self.assertTrue(driver.is_leaf_project(ref_id))
+
+ self.assertRaises(exception.ProjectNotFound,
+ driver.update_project,
+ ref_id,
+ {})
+
+ self.assertRaises(exception.ProjectNotFound,
+ driver.delete_project,
+ ref_id)
+
+ # Deleting list of projects that includes a non-existing project
+ # should be silent
+ driver.delete_projects_from_ids([ref_id])
+
+ _exercise_project_api(uuid.uuid4().hex)
+ _exercise_project_api(resource.NULL_DOMAIN_ID)
+
+
+class SqlTrust(SqlTests, trust_tests.TrustTests):
pass
-class SqlToken(SqlTests, test_backend.TokenTests):
+class SqlToken(SqlTests, token_tests.TokenTests):
def test_token_revocation_list_uses_right_columns(self):
# This query used to be heavy with too many columns. We want
# to make sure it is only running with the minimum columns
# necessary.
expected_query_args = (token_sql.TokenModel.id,
- token_sql.TokenModel.expires)
+ token_sql.TokenModel.expires,
+ token_sql.TokenModel.extra,)
with mock.patch.object(token_sql, 'sql') as mock_sql:
tok = token_sql.Token()
tok.list_revoked_tokens()
- mock_query = mock_sql.get_session().query
+ mock_query = mock_sql.session_for_read().__enter__().query
mock_query.assert_called_with(*expected_query_args)
def test_flush_expired_tokens_batch(self):
@@ -523,8 +679,12 @@ class SqlToken(SqlTests, test_backend.TokenTests):
# other tests below test the differences between how they use the batch
# strategy
with mock.patch.object(token_sql, 'sql') as mock_sql:
- mock_sql.get_session().query().filter().delete.return_value = 0
- mock_sql.get_session().bind.dialect.name = 'mysql'
+ mock_sql.session_for_write().__enter__(
+ ).query().filter().delete.return_value = 0
+
+ mock_sql.session_for_write().__enter__(
+ ).bind.dialect.name = 'mysql'
+
tok = token_sql.Token()
expiry_mock = mock.Mock()
ITERS = [1, 2, 3]
@@ -535,7 +695,10 @@ class SqlToken(SqlTests, test_backend.TokenTests):
# The expiry strategy is only invoked once, the other calls are via
# the yield return.
self.assertEqual(1, expiry_mock.call_count)
- mock_delete = mock_sql.get_session().query().filter().delete
+
+ mock_delete = mock_sql.session_for_write().__enter__(
+ ).query().filter().delete
+
self.assertThat(mock_delete.call_args_list,
matchers.HasLength(len(ITERS)))
@@ -550,12 +713,12 @@ class SqlToken(SqlTests, test_backend.TokenTests):
if i == 0:
# The first time the batch iterator returns, it should return
# the first result that comes back from the database.
- self.assertEqual(x, 'test')
+ self.assertEqual('test', x)
elif i == 1:
# The second time, the database range function should return
# nothing, so the batch iterator returns the result of the
# upper_bound function
- self.assertEqual(x, "final value")
+ self.assertEqual("final value", x)
else:
self.fail("range batch function returned more than twice")
@@ -568,39 +731,30 @@ class SqlToken(SqlTests, test_backend.TokenTests):
tok = token_sql.Token()
db2_strategy = tok._expiry_range_strategy('ibm_db_sa')
self.assertIsInstance(db2_strategy, functools.partial)
- self.assertEqual(db2_strategy.func, token_sql._expiry_range_batched)
- self.assertEqual(db2_strategy.keywords, {'batch_size': 100})
+ self.assertEqual(token_sql._expiry_range_batched, db2_strategy.func)
+ self.assertEqual({'batch_size': 100}, db2_strategy.keywords)
def test_expiry_range_strategy_mysql(self):
tok = token_sql.Token()
mysql_strategy = tok._expiry_range_strategy('mysql')
self.assertIsInstance(mysql_strategy, functools.partial)
- self.assertEqual(mysql_strategy.func, token_sql._expiry_range_batched)
- self.assertEqual(mysql_strategy.keywords, {'batch_size': 1000})
+ self.assertEqual(token_sql._expiry_range_batched, mysql_strategy.func)
+ self.assertEqual({'batch_size': 1000}, mysql_strategy.keywords)
-class SqlCatalog(SqlTests, test_backend.CatalogTests):
+class SqlCatalog(SqlTests, catalog_tests.CatalogTests):
_legacy_endpoint_id_in_endpoint = True
_enabled_default_to_true_when_creating_endpoint = True
def test_catalog_ignored_malformed_urls(self):
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service['id'], service.copy())
+ service = unit.new_service_ref()
+ self.catalog_api.create_service(service['id'], service)
malformed_url = "http://192.168.1.104:8774/v2/$(tenant)s"
- endpoint = {
- 'id': uuid.uuid4().hex,
- 'region_id': None,
- 'service_id': service['id'],
- 'interface': 'public',
- 'url': malformed_url,
- }
+ endpoint = unit.new_endpoint_ref(service_id=service['id'],
+ url=malformed_url,
+ region_id=None)
self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
# NOTE(dstanek): there are no valid URLs, so nothing is in the catalog
@@ -608,21 +762,11 @@ class SqlCatalog(SqlTests, test_backend.CatalogTests):
self.assertEqual({}, catalog)
def test_get_catalog_with_empty_public_url(self):
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service['id'], service.copy())
-
- endpoint = {
- 'id': uuid.uuid4().hex,
- 'region_id': None,
- 'interface': 'public',
- 'url': '',
- 'service_id': service['id'],
- }
+ service = unit.new_service_ref()
+ self.catalog_api.create_service(service['id'], service)
+
+ endpoint = unit.new_endpoint_ref(url='', service_id=service['id'],
+ region_id=None)
self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy())
catalog = self.catalog_api.get_catalog('user', 'tenant')
@@ -633,22 +777,12 @@ class SqlCatalog(SqlTests, test_backend.CatalogTests):
self.assertIsNone(catalog_endpoint.get('adminURL'))
self.assertIsNone(catalog_endpoint.get('internalURL'))
- def test_create_endpoint_region_404(self):
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
- self.catalog_api.create_service(service['id'], service.copy())
-
- endpoint = {
- 'id': uuid.uuid4().hex,
- 'region_id': uuid.uuid4().hex,
- 'service_id': service['id'],
- 'interface': 'public',
- 'url': uuid.uuid4().hex,
- }
+ def test_create_endpoint_region_returns_not_found(self):
+ service = unit.new_service_ref()
+ self.catalog_api.create_service(service['id'], service)
+
+ endpoint = unit.new_endpoint_ref(region_id=uuid.uuid4().hex,
+ service_id=service['id'])
self.assertRaises(exception.ValidationError,
self.catalog_api.create_endpoint,
@@ -656,21 +790,14 @@ class SqlCatalog(SqlTests, test_backend.CatalogTests):
endpoint.copy())
def test_create_region_invalid_id(self):
- region = {
- 'id': '0' * 256,
- 'description': '',
- 'extra': {},
- }
+ region = unit.new_region_ref(id='0' * 256)
self.assertRaises(exception.StringLengthExceeded,
self.catalog_api.create_region,
- region.copy())
+ region)
def test_create_region_invalid_parent_id(self):
- region = {
- 'id': uuid.uuid4().hex,
- 'parent_region_id': '0' * 256,
- }
+ region = unit.new_region_ref(parent_region_id='0' * 256)
self.assertRaises(exception.RegionNotFound,
self.catalog_api.create_region,
@@ -678,77 +805,57 @@ class SqlCatalog(SqlTests, test_backend.CatalogTests):
def test_delete_region_with_endpoint(self):
# create a region
- region = {
- 'id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
+ region = unit.new_region_ref()
self.catalog_api.create_region(region)
# create a child region
- child_region = {
- 'id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'parent_id': region['id']
- }
+ child_region = unit.new_region_ref(parent_region_id=region['id'])
self.catalog_api.create_region(child_region)
# create a service
- service = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- }
+ service = unit.new_service_ref()
self.catalog_api.create_service(service['id'], service)
# create an endpoint attached to the service and child region
- child_endpoint = {
- 'id': uuid.uuid4().hex,
- 'region_id': child_region['id'],
- 'interface': uuid.uuid4().hex[:8],
- 'url': uuid.uuid4().hex,
- 'service_id': service['id'],
- }
+ child_endpoint = unit.new_endpoint_ref(region_id=child_region['id'],
+ service_id=service['id'])
+
self.catalog_api.create_endpoint(child_endpoint['id'], child_endpoint)
self.assertRaises(exception.RegionDeletionError,
self.catalog_api.delete_region,
child_region['id'])
# create an endpoint attached to the service and parent region
- endpoint = {
- 'id': uuid.uuid4().hex,
- 'region_id': region['id'],
- 'interface': uuid.uuid4().hex[:8],
- 'url': uuid.uuid4().hex,
- 'service_id': service['id'],
- }
+ endpoint = unit.new_endpoint_ref(region_id=region['id'],
+ service_id=service['id'])
+
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
self.assertRaises(exception.RegionDeletionError,
self.catalog_api.delete_region,
region['id'])
-class SqlPolicy(SqlTests, test_backend.PolicyTests):
+class SqlPolicy(SqlTests, policy_tests.PolicyTests):
pass
-class SqlInheritance(SqlTests, test_backend.InheritanceTests):
+class SqlInheritance(SqlTests, assignment_tests.InheritanceTests):
pass
-class SqlTokenCacheInvalidation(SqlTests, test_backend.TokenCacheInvalidation):
+class SqlImpliedRoles(SqlTests, assignment_tests.ImpliedRoleTests):
+ pass
+
+
+class SqlTokenCacheInvalidation(SqlTests, token_tests.TokenCacheInvalidation):
def setUp(self):
super(SqlTokenCacheInvalidation, self).setUp()
self._create_test_data()
-class SqlFilterTests(SqlTests, test_backend.FilterTests):
-
- def _get_user_name_field_size(self):
- return identity_sql.User.name.type.length
+class SqlFilterTests(SqlTests, identity_tests.FilterTests):
def clean_up_entities(self):
"""Clean up entity test data from Filter Test Cases."""
-
for entity in ['user', 'group', 'project']:
self._delete_test_data(entity, self.entity_list[entity])
self._delete_test_data(entity, self.domain1_entity_list[entity])
@@ -760,11 +867,12 @@ class SqlFilterTests(SqlTests, test_backend.FilterTests):
del self.domain1
def test_list_entities_filtered_by_domain(self):
- # NOTE(henry-nash): This method is here rather than in test_backend
- # since any domain filtering with LDAP is handled by the manager
- # layer (and is already tested elsewhere) not at the driver level.
+ # NOTE(henry-nash): This method is here rather than in
+ # unit.identity.test_backends since any domain filtering with LDAP is
+ # handled by the manager layer (and is already tested elsewhere) not at
+ # the driver level.
self.addCleanup(self.clean_up_entities)
- self.domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.domain1 = unit.new_domain_ref()
self.resource_api.create_domain(self.domain1['id'], self.domain1)
self.entity_list = {}
@@ -804,7 +912,7 @@ class SqlFilterTests(SqlTests, test_backend.FilterTests):
# See if we can add a SQL command...use the group table instead of the
# user table since 'user' is reserved word for SQLAlchemy.
- group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID}
+ group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id)
group = self.identity_api.create_group(group)
hints = driver_hints.Hints()
@@ -816,10 +924,10 @@ class SqlFilterTests(SqlTests, test_backend.FilterTests):
self.assertTrue(len(groups) > 0)
-class SqlLimitTests(SqlTests, test_backend.LimitTests):
+class SqlLimitTests(SqlTests, identity_tests.LimitTests):
def setUp(self):
super(SqlLimitTests, self).setUp()
- test_backend.LimitTests.setUp(self)
+ identity_tests.LimitTests.setUp(self)
class FakeTable(sql.ModelBase):
@@ -850,11 +958,6 @@ class SqlDecorators(unit.TestCase):
tt = FakeTable(col='a')
self.assertEqual('a', tt.col)
- def test_non_ascii_init(self):
- # NOTE(I159): Non ASCII characters must cause UnicodeDecodeError
- # if encoding is not provided explicitly.
- self.assertRaises(UnicodeDecodeError, FakeTable, col='Я')
-
def test_conflict_happend(self):
self.assertRaises(exception.Conflict, FakeTable().insert)
self.assertRaises(exception.UnexpectedError, FakeTable().update)
@@ -876,21 +979,15 @@ class SqlModuleInitialization(unit.TestCase):
class SqlCredential(SqlTests):
def _create_credential_with_user_id(self, user_id=uuid.uuid4().hex):
- credential_id = uuid.uuid4().hex
- new_credential = {
- 'id': credential_id,
- 'user_id': user_id,
- 'project_id': uuid.uuid4().hex,
- 'blob': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'extra': uuid.uuid4().hex
- }
- self.credential_api.create_credential(credential_id, new_credential)
- return new_credential
+ credential = unit.new_credential_ref(user_id=user_id,
+ extra=uuid.uuid4().hex,
+ type=uuid.uuid4().hex)
+ self.credential_api.create_credential(credential['id'], credential)
+ return credential
def _validateCredentialList(self, retrieved_credentials,
expected_credentials):
- self.assertEqual(len(retrieved_credentials), len(expected_credentials))
+ self.assertEqual(len(expected_credentials), len(retrieved_credentials))
retrived_ids = [c['id'] for c in retrieved_credentials]
for cred in expected_credentials:
self.assertIn(cred['id'], retrived_ids)
@@ -920,3 +1017,9 @@ class SqlCredential(SqlTests):
credentials = self.credential_api.list_credentials_for_user(
self.user_foo['id'])
self._validateCredentialList(credentials, self.user_credentials)
+
+ def test_list_credentials_for_user_and_type(self):
+ cred = self.user_credentials[0]
+ credentials = self.credential_api.list_credentials_for_user(
+ self.user_foo['id'], type=cred['type'])
+ self._validateCredentialList(credentials, [cred])
diff --git a/keystone-moon/keystone/tests/unit/test_backend_templated.py b/keystone-moon/keystone/tests/unit/test_backend_templated.py
index 4a7bf9e5..ca957e78 100644
--- a/keystone-moon/keystone/tests/unit/test_backend_templated.py
+++ b/keystone-moon/keystone/tests/unit/test_backend_templated.py
@@ -19,16 +19,16 @@ from six.moves import zip
from keystone import catalog
from keystone.tests import unit
+from keystone.tests.unit.catalog import test_backends as catalog_tests
from keystone.tests.unit import default_fixtures
from keystone.tests.unit.ksfixtures import database
-from keystone.tests.unit import test_backend
BROKEN_WRITE_FUNCTIONALITY_MSG = ("Templated backend doesn't correctly "
"implement write operations")
-class TestTemplatedCatalog(unit.TestCase, test_backend.CatalogTests):
+class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests):
DEFAULT_FIXTURE = {
'RegionOne': {
@@ -64,8 +64,11 @@ class TestTemplatedCatalog(unit.TestCase, test_backend.CatalogTests):
def test_get_catalog(self):
catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
- self.assertDictEqual(catalog_ref, self.DEFAULT_FIXTURE)
+ self.assertDictEqual(self.DEFAULT_FIXTURE, catalog_ref)
+ # NOTE(lbragstad): This test is skipped because the catalog is being
+ # modified within the test and not through the API.
+ @unit.skip_if_cache_is_enabled('catalog')
def test_catalog_ignored_malformed_urls(self):
# both endpoints are in the catalog
catalog_ref = self.catalog_api.get_catalog('foo', 'bar')
@@ -85,7 +88,9 @@ class TestTemplatedCatalog(unit.TestCase, test_backend.CatalogTests):
self.skipTest("Templated backend doesn't have disabled endpoints")
def assert_catalogs_equal(self, expected, observed):
- for e, o in zip(sorted(expected), sorted(observed)):
+ sort_key = lambda d: d['id']
+ for e, o in zip(sorted(expected, key=sort_key),
+ sorted(observed, key=sort_key)):
expected_endpoints = e.pop('endpoints')
observed_endpoints = o.pop('endpoints')
self.assertDictEqual(e, o)
@@ -126,9 +131,10 @@ class TestTemplatedCatalog(unit.TestCase, test_backend.CatalogTests):
def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
user_id = uuid.uuid4().hex
+ tenant_id = None
# If the URL has no 'tenant_id' to substitute, we will skip the
# endpoint which contains this kind of URL.
- catalog_ref = self.catalog_api.get_v3_catalog(user_id, tenant_id=None)
+ catalog_ref = self.catalog_api.get_v3_catalog(user_id, tenant_id)
exp_catalog = [
{'endpoints': [],
'type': 'compute',
@@ -155,8 +161,24 @@ class TestTemplatedCatalog(unit.TestCase, test_backend.CatalogTests):
def test_service_filtering(self):
self.skipTest("Templated backend doesn't support filtering")
+ def test_list_services_with_hints(self):
+ hints = {}
+ services = self.catalog_api.list_services(hints=hints)
+ exp_services = [
+ {'type': 'compute',
+ 'description': '',
+ 'enabled': True,
+ 'name': "'Compute Service'",
+ 'id': 'compute'},
+ {'type': 'identity',
+ 'description': '',
+ 'enabled': True,
+ 'name': "'Identity Service'",
+ 'id': 'identity'}]
+ self.assertItemsEqual(exp_services, services)
+
# NOTE(dstanek): the following methods have been overridden
- # from test_backend.CatalogTests
+ # from unit.catalog.test_backends.CatalogTests.
def test_region_crud(self):
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
@@ -172,10 +194,10 @@ class TestTemplatedCatalog(unit.TestCase, test_backend.CatalogTests):
def test_create_region_with_duplicate_id(self):
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
- def test_delete_region_404(self):
+ def test_delete_region_returns_not_found(self):
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
- def test_create_region_invalid_parent_region_404(self):
+ def test_create_region_invalid_parent_region_returns_not_found(self):
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
def test_avoid_creating_circular_references_in_regions_update(self):
@@ -203,7 +225,7 @@ class TestTemplatedCatalog(unit.TestCase, test_backend.CatalogTests):
def test_cache_layer_delete_service_with_endpoint(self):
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
- def test_delete_service_404(self):
+ def test_delete_service_returns_not_found(self):
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
def test_update_endpoint_nonexistent_service(self):
@@ -215,10 +237,10 @@ class TestTemplatedCatalog(unit.TestCase, test_backend.CatalogTests):
def test_update_endpoint_nonexistent_region(self):
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
- def test_get_endpoint_404(self):
+ def test_get_endpoint_returns_not_found(self):
self.skipTest("Templated backend doesn't use IDs for endpoints.")
- def test_delete_endpoint_404(self):
+ def test_delete_endpoint_returns_not_found(self):
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
def test_create_endpoint(self):
@@ -228,11 +250,11 @@ class TestTemplatedCatalog(unit.TestCase, test_backend.CatalogTests):
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG)
def test_list_endpoints(self):
- # NOTE(dstanek): a future commit will fix this functionality and
- # this test
- expected_ids = set()
+ expected_urls = set(['http://localhost:$(public_port)s/v2.0',
+ 'http://localhost:$(admin_port)s/v2.0',
+ 'http://localhost:8774/v1.1/$(tenant_id)s'])
endpoints = self.catalog_api.list_endpoints()
- self.assertEqual(expected_ids, set(e['id'] for e in endpoints))
+ self.assertEqual(expected_urls, set(e['url'] for e in endpoints))
@unit.skip_if_cache_disabled('catalog')
def test_invalidate_cache_when_updating_endpoint(self):
diff --git a/keystone-moon/keystone/tests/unit/test_catalog.py b/keystone-moon/keystone/tests/unit/test_catalog.py
index ada2de43..76e3055a 100644
--- a/keystone-moon/keystone/tests/unit/test_catalog.py
+++ b/keystone-moon/keystone/tests/unit/test_catalog.py
@@ -31,12 +31,9 @@ class V2CatalogTestCase(rest.RestfulTestCase):
super(V2CatalogTestCase, self).setUp()
self.useFixture(database.Database())
- self.service_id = uuid.uuid4().hex
self.service = unit.new_service_ref()
- self.service['id'] = self.service_id
- self.catalog_api.create_service(
- self.service_id,
- self.service.copy())
+ self.service_id = self.service['id']
+ self.catalog_api.create_service(self.service_id, self.service)
# TODO(termie): add an admin user to the fixtures and use that user
# override the fixtures, for now
@@ -53,13 +50,14 @@ class V2CatalogTestCase(rest.RestfulTestCase):
"""Applicable only to JSON."""
return r.result['access']['token']['id']
- def _endpoint_create(self, expected_status=200, service_id=SERVICE_FIXTURE,
+ def _endpoint_create(self, expected_status=http_client.OK,
+ service_id=SERVICE_FIXTURE,
publicurl='http://localhost:8080',
internalurl='http://localhost:8080',
adminurl='http://localhost:8080'):
if service_id is SERVICE_FIXTURE:
service_id = self.service_id
- # FIXME(dolph): expected status should actually be 201 Created
+
path = '/v2.0/endpoints'
body = {
'endpoint': {
@@ -77,40 +75,33 @@ class V2CatalogTestCase(rest.RestfulTestCase):
return body, r
def _region_create(self):
- region_id = uuid.uuid4().hex
- self.catalog_api.create_region({'id': region_id})
+ region = unit.new_region_ref()
+ region_id = region['id']
+ self.catalog_api.create_region(region)
return region_id
- def _service_create(self):
- service_id = uuid.uuid4().hex
- service = unit.new_service_ref()
- service['id'] = service_id
- self.catalog_api.create_service(service_id, service)
- return service_id
-
def test_endpoint_create(self):
req_body, response = self._endpoint_create()
self.assertIn('endpoint', response.result)
self.assertIn('id', response.result['endpoint'])
for field, value in req_body['endpoint'].items():
- self.assertEqual(response.result['endpoint'][field], value)
+ self.assertEqual(value, response.result['endpoint'][field])
def test_pure_v3_endpoint_with_publicurl_visible_from_v2(self):
- """Test pure v3 endpoint can be fetched via v2 API.
+ """Test pure v3 endpoint can be fetched via v2.0 API.
- For those who are using v2 APIs, endpoints created by v3 API should
+ For those who are using v2.0 APIs, endpoints created by v3 API should
also be visible as there are no differences about the endpoints
- except the format or the internal implementation.
- And because public url is required for v2 API, so only the v3 endpoints
- of the service which has the public interface endpoint will be
- converted into v2 endpoints.
+ except the format or the internal implementation. Since publicURL is
+ required for v2.0 API, so only v3 endpoints of the service which have
+ the public interface endpoint will be converted into v2.0 endpoints.
"""
region_id = self._region_create()
- service_id = self._service_create()
- # create a v3 endpoint with three interfaces
+
+ # create v3 endpoints with three interfaces
body = {
- 'endpoint': unit.new_endpoint_ref(service_id,
- default_region_id=region_id)
+ 'endpoint': unit.new_endpoint_ref(self.service_id,
+ region_id=region_id)
}
for interface in catalog.controllers.INTERFACES:
body['endpoint']['interface'] = interface
@@ -122,11 +113,11 @@ class V2CatalogTestCase(rest.RestfulTestCase):
r = self.admin_request(token=self.get_scoped_token(),
path='/v2.0/endpoints')
- # v3 endpoints having public url can be fetched via v2.0 API
+ # Endpoints of the service which have a public interface endpoint
+ # will be returned via v2.0 API
self.assertEqual(1, len(r.result['endpoints']))
v2_endpoint = r.result['endpoints'][0]
- self.assertEqual(service_id, v2_endpoint['service_id'])
- # check urls just in case.
+ self.assertEqual(self.service_id, v2_endpoint['service_id'])
# This is not the focus of this test, so no different urls are used.
self.assertEqual(body['endpoint']['url'], v2_endpoint['publicurl'])
self.assertEqual(body['endpoint']['url'], v2_endpoint['adminurl'])
@@ -134,23 +125,23 @@ class V2CatalogTestCase(rest.RestfulTestCase):
self.assertNotIn('name', v2_endpoint)
v3_endpoint = self.catalog_api.get_endpoint(v2_endpoint['id'])
- # it's the v3 public endpoint's id as the generated v2 endpoint
+ # Checks the v3 public endpoint's id is the generated v2.0 endpoint
self.assertEqual('public', v3_endpoint['interface'])
- self.assertEqual(service_id, v3_endpoint['service_id'])
+ self.assertEqual(self.service_id, v3_endpoint['service_id'])
def test_pure_v3_endpoint_without_publicurl_invisible_from_v2(self):
- """Test pure v3 endpoint without public url can't be fetched via v2 API.
+ """Test that the v2.0 API can't fetch v3 endpoints without publicURLs.
- V2 API will return endpoints created by v3 API, but because public url
- is required for v2 API, so v3 endpoints without public url will be
- ignored.
+ v2.0 API will return endpoints created by v3 API, but publicURL is
+ required for the service in the v2.0 API, therefore v3 endpoints of
+ a service which don't have publicURL will be ignored.
"""
region_id = self._region_create()
- service_id = self._service_create()
+
# create a v3 endpoint without public interface
body = {
- 'endpoint': unit.new_endpoint_ref(service_id,
- default_region_id=region_id)
+ 'endpoint': unit.new_endpoint_ref(self.service_id,
+ region_id=region_id)
}
for interface in catalog.controllers.INTERFACES:
if interface == 'public':
@@ -164,7 +155,8 @@ class V2CatalogTestCase(rest.RestfulTestCase):
r = self.admin_request(token=self.get_scoped_token(),
path='/v2.0/endpoints')
- # v3 endpoints without public url won't be fetched via v2.0 API
+ # v3 endpoints of a service which don't have publicURL can't be
+ # fetched via v2.0 API
self.assertEqual(0, len(r.result['endpoints']))
def test_endpoint_create_with_null_adminurl(self):
@@ -209,7 +201,7 @@ class V2CatalogTestCase(rest.RestfulTestCase):
valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
# baseline tests that all valid URLs works
- self._endpoint_create(expected_status=200,
+ self._endpoint_create(expected_status=http_client.OK,
publicurl=valid_url,
internalurl=valid_url,
adminurl=valid_url)
@@ -297,28 +289,23 @@ class TestV2CatalogAPISQL(unit.TestCase):
self.useFixture(database.Database())
self.catalog_api = catalog.Manager()
- self.service_id = uuid.uuid4().hex
- service = {'id': self.service_id, 'name': uuid.uuid4().hex}
+ service = unit.new_service_ref()
+ self.service_id = service['id']
self.catalog_api.create_service(self.service_id, service)
- endpoint = self.new_endpoint_ref(service_id=self.service_id)
+ self.create_endpoint(service_id=self.service_id)
+
+ def create_endpoint(self, service_id, **kwargs):
+ endpoint = unit.new_endpoint_ref(service_id=service_id,
+ region_id=None,
+ **kwargs)
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ return endpoint
def config_overrides(self):
super(TestV2CatalogAPISQL, self).config_overrides()
self.config_fixture.config(group='catalog', driver='sql')
- def new_endpoint_ref(self, service_id):
- return {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'interface': uuid.uuid4().hex[:8],
- 'service_id': service_id,
- 'url': uuid.uuid4().hex,
- 'region': uuid.uuid4().hex,
- }
-
def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
@@ -330,14 +317,12 @@ class TestV2CatalogAPISQL(unit.TestCase):
self.assertEqual(1, len(self.catalog_api.list_endpoints()))
# create a new, invalid endpoint - malformed type declaration
- endpoint = self.new_endpoint_ref(self.service_id)
- endpoint['url'] = 'http://keystone/%(tenant_id)'
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ self.create_endpoint(self.service_id,
+ url='http://keystone/%(tenant_id)')
# create a new, invalid endpoint - nonexistent key
- endpoint = self.new_endpoint_ref(self.service_id)
- endpoint['url'] = 'http://keystone/%(you_wont_find_me)s'
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ self.create_endpoint(self.service_id,
+ url='http://keystone/%(you_wont_find_me)s')
# verify that the invalid endpoints don't appear in the catalog
catalog = self.catalog_api.get_catalog(user_id, tenant_id)
@@ -349,28 +334,22 @@ class TestV2CatalogAPISQL(unit.TestCase):
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
- # create a service, with a name
- named_svc = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- }
+ # new_service_ref() returns a ref with a `name`.
+ named_svc = unit.new_service_ref()
self.catalog_api.create_service(named_svc['id'], named_svc)
- endpoint = self.new_endpoint_ref(service_id=named_svc['id'])
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ self.create_endpoint(service_id=named_svc['id'])
- # create a service, with no name
- unnamed_svc = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex
- }
+ # This time manually delete the generated `name`.
+ unnamed_svc = unit.new_service_ref()
+ del unnamed_svc['name']
self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc)
- endpoint = self.new_endpoint_ref(service_id=unnamed_svc['id'])
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ self.create_endpoint(service_id=unnamed_svc['id'])
region = None
catalog = self.catalog_api.get_catalog(user_id, tenant_id)
self.assertEqual(named_svc['name'],
catalog[region][named_svc['type']]['name'])
+
+ # verify a name is not generated when the service is passed to the API
self.assertEqual('', catalog[region][unnamed_svc['type']]['name'])
diff --git a/keystone-moon/keystone/tests/unit/test_cert_setup.py b/keystone-moon/keystone/tests/unit/test_cert_setup.py
index 769e7c8e..debf87f5 100644
--- a/keystone-moon/keystone/tests/unit/test_cert_setup.py
+++ b/keystone-moon/keystone/tests/unit/test_cert_setup.py
@@ -17,6 +17,7 @@ import os
import shutil
import mock
+from six.moves import http_client
from testtools import matchers
from keystone.common import environment
@@ -29,7 +30,6 @@ from keystone import token
SSLDIR = unit.dirs.tmp('ssl')
CONF = unit.CONF
-DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
CERTDIR = os.path.join(SSLDIR, 'certs')
@@ -74,17 +74,12 @@ class CertSetupTestCase(rest.RestfulTestCase):
controller = token.controllers.Auth()
self.config_fixture.config(group='signing', certfile='invalid')
- password = 'fake1'
- user = {
- 'name': 'fake1',
- 'password': password,
- 'domain_id': DEFAULT_DOMAIN_ID
- }
- user = self.identity_api.create_user(user)
+ user = unit.create_user(self.identity_api,
+ domain_id=CONF.identity.default_domain_id)
body_dict = {
'passwordCredentials': {
'userId': user['id'],
- 'password': password,
+ 'password': user['password'],
},
}
self.assertRaises(exception.UnexpectedError,
@@ -113,11 +108,13 @@ class CertSetupTestCase(rest.RestfulTestCase):
# requests don't have some of the normal information
signing_resp = self.request(self.public_app,
'/v2.0/certificates/signing',
- method='GET', expected_status=200)
+ method='GET',
+ expected_status=http_client.OK)
cacert_resp = self.request(self.public_app,
'/v2.0/certificates/ca',
- method='GET', expected_status=200)
+ method='GET',
+ expected_status=http_client.OK)
with open(CONF.signing.certfile) as f:
self.assertEqual(f.read(), signing_resp.text)
@@ -133,7 +130,7 @@ class CertSetupTestCase(rest.RestfulTestCase):
for accept in [None, 'text/html', 'application/json', 'text/xml']:
headers = {'Accept': accept} if accept else {}
resp = self.request(self.public_app, path, method='GET',
- expected_status=200,
+ expected_status=http_client.OK,
headers=headers)
self.assertEqual('text/html', resp.content_type)
@@ -146,7 +143,7 @@ class CertSetupTestCase(rest.RestfulTestCase):
def test_failure(self):
for path in ['/v2.0/certificates/signing', '/v2.0/certificates/ca']:
self.request(self.public_app, path, method='GET',
- expected_status=500)
+ expected_status=http_client.INTERNAL_SERVER_ERROR)
def test_pki_certs_rebuild(self):
self.test_create_pki_certs()
@@ -228,15 +225,17 @@ class TestExecCommand(unit.TestCase):
ssl = openssl.ConfigureSSL('keystone_user', 'keystone_group')
ssl.exec_command(['ls'])
- @mock.patch.object(environment.subprocess.Popen, 'communicate')
- @mock.patch.object(environment.subprocess.Popen, 'poll')
- def test_running_an_invalid_command(self, mock_poll, mock_communicate):
+ @mock.patch.object(environment.subprocess, 'check_output')
+ def test_running_an_invalid_command(self, mock_check_output):
+ cmd = ['ls']
+
output = 'this is the output string'
- mock_communicate.return_value = (output, '')
- mock_poll.return_value = 1
+ error = environment.subprocess.CalledProcessError(returncode=1,
+ cmd=cmd,
+ output=output)
+ mock_check_output.side_effect = error
- cmd = ['ls']
ssl = openssl.ConfigureSSL('keystone_user', 'keystone_group')
e = self.assertRaises(environment.subprocess.CalledProcessError,
ssl.exec_command,
diff --git a/keystone-moon/keystone/tests/unit/test_cli.py b/keystone-moon/keystone/tests/unit/test_cli.py
index d967eb53..06f2e172 100644
--- a/keystone-moon/keystone/tests/unit/test_cli.py
+++ b/keystone-moon/keystone/tests/unit/test_cli.py
@@ -15,9 +15,11 @@
import os
import uuid
+import fixtures
import mock
from oslo_config import cfg
from six.moves import range
+from testtools import matchers
from keystone.cmd import cli
from keystone.common import dependency
@@ -42,6 +44,199 @@ class CliTestCase(unit.SQLDriverOverrides, unit.TestCase):
cli.TokenFlush.main()
+class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase):
+
+ def setUp(self):
+ self.useFixture(database.Database())
+ super(CliBootStrapTestCase, self).setUp()
+
+ def config_files(self):
+ self.config_fixture.register_cli_opt(cli.command_opt)
+ config_files = super(CliBootStrapTestCase, self).config_files()
+ config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
+ return config_files
+
+ def config(self, config_files):
+ CONF(args=['bootstrap', '--bootstrap-password', uuid.uuid4().hex],
+ project='keystone',
+ default_config_files=config_files)
+
+ def test_bootstrap(self):
+ bootstrap = cli.BootStrap()
+ self._do_test_bootstrap(bootstrap)
+
+ def _do_test_bootstrap(self, bootstrap):
+ bootstrap.do_bootstrap()
+ project = bootstrap.resource_manager.get_project_by_name(
+ bootstrap.project_name,
+ 'default')
+ user = bootstrap.identity_manager.get_user_by_name(
+ bootstrap.username,
+ 'default')
+ role = bootstrap.role_manager.get_role(bootstrap.role_id)
+ role_list = (
+ bootstrap.assignment_manager.get_roles_for_user_and_project(
+ user['id'],
+ project['id']))
+ self.assertIs(len(role_list), 1)
+ self.assertEqual(role_list[0], role['id'])
+ # NOTE(morganfainberg): Pass an empty context, it isn't used by
+ # `authenticate` method.
+ bootstrap.identity_manager.authenticate(
+ {},
+ user['id'],
+ bootstrap.password)
+
+ if bootstrap.region_id:
+ region = bootstrap.catalog_manager.get_region(bootstrap.region_id)
+ self.assertEqual(self.region_id, region['id'])
+
+ if bootstrap.service_id:
+ svc = bootstrap.catalog_manager.get_service(bootstrap.service_id)
+ self.assertEqual(self.service_name, svc['name'])
+
+ self.assertEqual(set(['admin', 'public', 'internal']),
+ set(bootstrap.endpoints))
+
+ urls = {'public': self.public_url,
+ 'internal': self.internal_url,
+ 'admin': self.admin_url}
+
+ for interface, url in urls.items():
+ endpoint_id = bootstrap.endpoints[interface]
+ endpoint = bootstrap.catalog_manager.get_endpoint(endpoint_id)
+
+ self.assertEqual(self.region_id, endpoint['region_id'])
+ self.assertEqual(url, endpoint['url'])
+ self.assertEqual(svc['id'], endpoint['service_id'])
+ self.assertEqual(interface, endpoint['interface'])
+
+ def test_bootstrap_is_idempotent(self):
+ # NOTE(morganfainberg): Ensure we can run bootstrap multiple times
+ # without erroring.
+ bootstrap = cli.BootStrap()
+ self._do_test_bootstrap(bootstrap)
+ self._do_test_bootstrap(bootstrap)
+
+
+class CliBootStrapTestCaseWithEnvironment(CliBootStrapTestCase):
+
+ def config(self, config_files):
+ CONF(args=['bootstrap'], project='keystone',
+ default_config_files=config_files)
+
+ def setUp(self):
+ super(CliBootStrapTestCaseWithEnvironment, self).setUp()
+ self.password = uuid.uuid4().hex
+ self.username = uuid.uuid4().hex
+ self.project_name = uuid.uuid4().hex
+ self.role_name = uuid.uuid4().hex
+ self.service_name = uuid.uuid4().hex
+ self.public_url = uuid.uuid4().hex
+ self.internal_url = uuid.uuid4().hex
+ self.admin_url = uuid.uuid4().hex
+ self.region_id = uuid.uuid4().hex
+ self.default_domain = {
+ 'id': CONF.identity.default_domain_id,
+ 'name': 'Default',
+ }
+ self.useFixture(
+ fixtures.EnvironmentVariable('OS_BOOTSTRAP_PASSWORD',
+ newvalue=self.password))
+ self.useFixture(
+ fixtures.EnvironmentVariable('OS_BOOTSTRAP_USERNAME',
+ newvalue=self.username))
+ self.useFixture(
+ fixtures.EnvironmentVariable('OS_BOOTSTRAP_PROJECT_NAME',
+ newvalue=self.project_name))
+ self.useFixture(
+ fixtures.EnvironmentVariable('OS_BOOTSTRAP_ROLE_NAME',
+ newvalue=self.role_name))
+ self.useFixture(
+ fixtures.EnvironmentVariable('OS_BOOTSTRAP_SERVICE_NAME',
+ newvalue=self.service_name))
+ self.useFixture(
+ fixtures.EnvironmentVariable('OS_BOOTSTRAP_PUBLIC_URL',
+ newvalue=self.public_url))
+ self.useFixture(
+ fixtures.EnvironmentVariable('OS_BOOTSTRAP_INTERNAL_URL',
+ newvalue=self.internal_url))
+ self.useFixture(
+ fixtures.EnvironmentVariable('OS_BOOTSTRAP_ADMIN_URL',
+ newvalue=self.admin_url))
+ self.useFixture(
+ fixtures.EnvironmentVariable('OS_BOOTSTRAP_REGION_ID',
+ newvalue=self.region_id))
+
+ def test_assignment_created_with_user_exists(self):
+ # test assignment can be created if user already exists.
+ bootstrap = cli.BootStrap()
+ bootstrap.resource_manager.create_domain(self.default_domain['id'],
+ self.default_domain)
+ user_ref = unit.new_user_ref(self.default_domain['id'],
+ name=self.username,
+ password=self.password)
+ bootstrap.identity_manager.create_user(user_ref)
+ self._do_test_bootstrap(bootstrap)
+
+ def test_assignment_created_with_project_exists(self):
+ # test assignment can be created if project already exists.
+ bootstrap = cli.BootStrap()
+ bootstrap.resource_manager.create_domain(self.default_domain['id'],
+ self.default_domain)
+ project_ref = unit.new_project_ref(self.default_domain['id'],
+ name=self.project_name)
+ bootstrap.resource_manager.create_project(project_ref['id'],
+ project_ref)
+ self._do_test_bootstrap(bootstrap)
+
+ def test_assignment_created_with_role_exists(self):
+ # test assignment can be created if role already exists.
+ bootstrap = cli.BootStrap()
+ bootstrap.resource_manager.create_domain(self.default_domain['id'],
+ self.default_domain)
+ role = unit.new_role_ref(name=self.role_name)
+ bootstrap.role_manager.create_role(role['id'], role)
+ self._do_test_bootstrap(bootstrap)
+
+ def test_assignment_created_with_region_exists(self):
+ # test assignment can be created if region already exists.
+ bootstrap = cli.BootStrap()
+ bootstrap.resource_manager.create_domain(self.default_domain['id'],
+ self.default_domain)
+ region = unit.new_region_ref(id=self.region_id)
+ bootstrap.catalog_manager.create_region(region)
+ self._do_test_bootstrap(bootstrap)
+
+ def test_endpoints_created_with_service_exists(self):
+ # test endpoints can be created if service already exists.
+ bootstrap = cli.BootStrap()
+ bootstrap.resource_manager.create_domain(self.default_domain['id'],
+ self.default_domain)
+ service = unit.new_service_ref(name=self.service_name)
+ bootstrap.catalog_manager.create_service(service['id'], service)
+ self._do_test_bootstrap(bootstrap)
+
+ def test_endpoints_created_with_endpoint_exists(self):
+ # test endpoints can be created if endpoint already exists.
+ bootstrap = cli.BootStrap()
+ bootstrap.resource_manager.create_domain(self.default_domain['id'],
+ self.default_domain)
+ service = unit.new_service_ref(name=self.service_name)
+ bootstrap.catalog_manager.create_service(service['id'], service)
+
+ region = unit.new_region_ref(id=self.region_id)
+ bootstrap.catalog_manager.create_region(region)
+
+ endpoint = unit.new_endpoint_ref(interface='public',
+ service_id=service['id'],
+ url=self.public_url,
+ region_id=self.region_id)
+ bootstrap.catalog_manager.create_endpoint(endpoint['id'], endpoint)
+
+ self._do_test_bootstrap(bootstrap)
+
+
class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase):
def setUp(self):
@@ -112,7 +307,8 @@ class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase):
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
- 'identity': {'driver': 'ldap'}
+ 'identity': {'driver': 'ldap',
+ 'list_limit': '101'}
}
domain2_config = {
'ldap': {'url': 'fake://memory',
@@ -182,8 +378,8 @@ class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase):
# Now try and upload the settings in the configuration file for the
# default domain
dependency.reset()
- with mock.patch('__builtin__.print') as mock_print:
- self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
+ with mock.patch('six.moves.builtins.print') as mock_print:
+ self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
file_name = ('keystone.%s.conf' %
resource.calc_default_domain()['name'])
error_msg = _(
@@ -208,8 +404,8 @@ class CliDomainConfigNoOptionsTestCase(CliDomainConfigAllTestCase):
def test_config_upload(self):
dependency.reset()
- with mock.patch('__builtin__.print') as mock_print:
- self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
+ with mock.patch('six.moves.builtins.print') as mock_print:
+ self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
mock_print.assert_has_calls(
[mock.call(
_('At least one option must be provided, use either '
@@ -225,8 +421,8 @@ class CliDomainConfigTooManyOptionsTestCase(CliDomainConfigAllTestCase):
def test_config_upload(self):
dependency.reset()
- with mock.patch('__builtin__.print') as mock_print:
- self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
+ with mock.patch('six.moves.builtins.print') as mock_print:
+ self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
mock_print.assert_has_calls(
[mock.call(_('The --all option cannot be used with '
'the --domain-name option'))])
@@ -242,8 +438,8 @@ class CliDomainConfigInvalidDomainTestCase(CliDomainConfigAllTestCase):
def test_config_upload(self):
dependency.reset()
- with mock.patch('__builtin__.print') as mock_print:
- self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
+ with mock.patch('six.moves.builtins.print') as mock_print:
+ self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main)
file_name = 'keystone.%s.conf' % self.invalid_domain_name
error_msg = (_(
'Invalid domain name: %(domain)s found in config file name: '
@@ -252,3 +448,31 @@ class CliDomainConfigInvalidDomainTestCase(CliDomainConfigAllTestCase):
'file': os.path.join(CONF.identity.domain_config_dir,
file_name)})
mock_print.assert_has_calls([mock.call(error_msg)])
+
+
+class TestDomainConfigFinder(unit.BaseTestCase):
+
+ def setUp(self):
+ super(TestDomainConfigFinder, self).setUp()
+ self.logging = self.useFixture(fixtures.LoggerFixture())
+
+ @mock.patch('os.walk')
+ def test_finder_ignores_files(self, mock_walk):
+ mock_walk.return_value = [
+ ['.', [], ['file.txt', 'keystone.conf', 'keystone.domain0.conf']],
+ ]
+
+ domain_configs = list(cli._domain_config_finder('.'))
+
+ expected_domain_configs = [('./keystone.domain0.conf', 'domain0')]
+ self.assertThat(domain_configs,
+ matchers.Equals(expected_domain_configs))
+
+ expected_msg_template = ('Ignoring file (%s) while scanning '
+ 'domain config directory')
+ self.assertThat(
+ self.logging.output,
+ matchers.Contains(expected_msg_template % 'file.txt'))
+ self.assertThat(
+ self.logging.output,
+ matchers.Contains(expected_msg_template % 'keystone.conf'))
diff --git a/keystone-moon/keystone/tests/unit/test_config.py b/keystone-moon/keystone/tests/unit/test_config.py
index 7984646d..d7e7809f 100644
--- a/keystone-moon/keystone/tests/unit/test_config.py
+++ b/keystone-moon/keystone/tests/unit/test_config.py
@@ -16,7 +16,7 @@ import uuid
from oslo_config import cfg
-from keystone import config
+from keystone.common import config
from keystone import exception
from keystone.tests import unit
diff --git a/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py b/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py
index 18c76dad..c9706da7 100644
--- a/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py
+++ b/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py
@@ -27,9 +27,9 @@ class S3ContribCore(unit.TestCase):
self.controller = s3.S3Controller()
- def test_good_signature(self):
+ def test_good_signature_v1(self):
creds_ref = {'secret':
- 'b121dd41cdcc42fe9f70e572e84295aa'}
+ u'b121dd41cdcc42fe9f70e572e84295aa'}
credentials = {'token':
'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB'
'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM'
@@ -40,9 +40,9 @@ class S3ContribCore(unit.TestCase):
self.assertIsNone(self.controller.check_signature(creds_ref,
credentials))
- def test_bad_signature(self):
+ def test_bad_signature_v1(self):
creds_ref = {'secret':
- 'b121dd41cdcc42fe9f70e572e84295aa'}
+ u'b121dd41cdcc42fe9f70e572e84295aa'}
credentials = {'token':
'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB'
'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM'
@@ -53,3 +53,51 @@ class S3ContribCore(unit.TestCase):
self.assertRaises(exception.Unauthorized,
self.controller.check_signature,
creds_ref, credentials)
+
+ def test_good_signature_v4(self):
+ creds_ref = {'secret':
+ u'e7a7a2240136494986991a6598d9fb9f'}
+ credentials = {'token':
+ 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw'
+ 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy'
+ 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1'
+ 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==',
+ 'signature':
+ '730ba8f58df6ffeadd78f402e990b2910d60'
+ 'bc5c2aec63619734f096a4dd77be'}
+
+ self.assertIsNone(self.controller.check_signature(creds_ref,
+ credentials))
+
+ def test_bad_signature_v4(self):
+ creds_ref = {'secret':
+ u'e7a7a2240136494986991a6598d9fb9f'}
+ credentials = {'token':
+ 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw'
+ 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy'
+ 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1'
+ 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==',
+ 'signature': uuid.uuid4().hex}
+
+ self.assertRaises(exception.Unauthorized,
+ self.controller.check_signature,
+ creds_ref, credentials)
+
+ def test_bad_token_v4(self):
+ creds_ref = {'secret':
+ u'e7a7a2240136494986991a6598d9fb9f'}
+ # token has invalid format of first part
+ credentials = {'token':
+ 'QVdTNC1BQUEKWApYClg=',
+ 'signature': ''}
+ self.assertRaises(exception.Unauthorized,
+ self.controller.check_signature,
+ creds_ref, credentials)
+
+ # token has invalid format of scope
+ credentials = {'token':
+ 'QVdTNC1ITUFDLVNIQTI1NgpYCi8vczMvYXdzTl9yZXF1ZXN0Clg=',
+ 'signature': ''}
+ self.assertRaises(exception.Unauthorized,
+ self.controller.check_signature,
+ creds_ref, credentials)
diff --git a/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py b/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py
index 8664e2c3..111aa5c6 100644
--- a/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py
+++ b/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py
@@ -12,13 +12,13 @@
import uuid
+from six.moves import http_client
+
from keystone.tests.unit import test_v3
class BaseTestCase(test_v3.RestfulTestCase):
- EXTENSION_TO_ADD = 'simple_cert_extension'
-
CA_PATH = '/v3/OS-SIMPLE-CERT/ca'
CERT_PATH = '/v3/OS-SIMPLE-CERT/certificates'
@@ -31,10 +31,10 @@ class TestSimpleCert(BaseTestCase):
method='GET',
path=path,
headers={'Accept': content_type},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertEqual(content_type, response.content_type.lower())
- self.assertIn('---BEGIN', response.body)
+ self.assertIn(b'---BEGIN', response.body)
return response
@@ -54,4 +54,4 @@ class TestSimpleCert(BaseTestCase):
self.request(app=self.public_app,
method='GET',
path=path,
- expected_status=500)
+ expected_status=http_client.INTERNAL_SERVER_ERROR)
diff --git a/keystone-moon/keystone/tests/unit/test_credential.py b/keystone-moon/keystone/tests/unit/test_credential.py
new file mode 100644
index 00000000..e917ef71
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_credential.py
@@ -0,0 +1,265 @@
+# Copyright 2015 UnitedStack, Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from keystoneclient.contrib.ec2 import utils as ec2_utils
+from six.moves import http_client
+
+from keystone.common import utils
+from keystone.contrib.ec2 import controllers
+from keystone import exception
+from keystone.tests import unit
+from keystone.tests.unit import default_fixtures
+from keystone.tests.unit.ksfixtures import database
+from keystone.tests.unit import rest
+
+CRED_TYPE_EC2 = controllers.CRED_TYPE_EC2
+
+
+class V2CredentialEc2TestCase(rest.RestfulTestCase):
+ def setUp(self):
+ super(V2CredentialEc2TestCase, self).setUp()
+ self.user_id = self.user_foo['id']
+ self.project_id = self.tenant_bar['id']
+
+ def _get_token_id(self, r):
+ return r.result['access']['token']['id']
+
+ def _get_ec2_cred(self):
+ uri = self._get_ec2_cred_uri()
+ r = self.public_request(method='POST', token=self.get_scoped_token(),
+ path=uri, body={'tenant_id': self.project_id})
+ return r.result['credential']
+
+ def _get_ec2_cred_uri(self):
+ return '/v2.0/users/%s/credentials/OS-EC2' % self.user_id
+
+ def test_ec2_cannot_get_non_ec2_credential(self):
+ access_key = uuid.uuid4().hex
+ cred_id = utils.hash_access_key(access_key)
+ non_ec2_cred = unit.new_credential_ref(
+ user_id=self.user_id,
+ project_id=self.project_id)
+ non_ec2_cred['id'] = cred_id
+ self.credential_api.create_credential(cred_id, non_ec2_cred)
+
+ # if access_key is not found, ec2 controller raises Unauthorized
+ # exception
+ path = '/'.join([self._get_ec2_cred_uri(), access_key])
+ self.public_request(method='GET', token=self.get_scoped_token(),
+ path=path,
+ expected_status=http_client.UNAUTHORIZED)
+
+ def assertValidErrorResponse(self, r):
+ # FIXME(wwwjfy): it's copied from test_v3.py. The logic of this method
+ # in test_v2.py and test_v3.py (both are inherited from rest.py) has no
+ # difference, so they should be refactored into one place. Also, the
+ # function signatures in both files don't match the one in the parent
+ # class in rest.py.
+ resp = r.result
+ self.assertIsNotNone(resp.get('error'))
+ self.assertIsNotNone(resp['error'].get('code'))
+ self.assertIsNotNone(resp['error'].get('title'))
+ self.assertIsNotNone(resp['error'].get('message'))
+ self.assertEqual(int(resp['error']['code']), r.status_code)
+
+ def test_ec2_list_credentials(self):
+ self._get_ec2_cred()
+ uri = self._get_ec2_cred_uri()
+ r = self.public_request(method='GET', token=self.get_scoped_token(),
+ path=uri)
+ cred_list = r.result['credentials']
+ self.assertEqual(1, len(cred_list))
+
+ # non-EC2 credentials won't be fetched
+ non_ec2_cred = unit.new_credential_ref(
+ user_id=self.user_id,
+ project_id=self.project_id)
+ non_ec2_cred['type'] = uuid.uuid4().hex
+ self.credential_api.create_credential(non_ec2_cred['id'],
+ non_ec2_cred)
+ r = self.public_request(method='GET', token=self.get_scoped_token(),
+ path=uri)
+ cred_list_2 = r.result['credentials']
+ # still one element because non-EC2 credentials are not returned.
+ self.assertEqual(1, len(cred_list_2))
+ self.assertEqual(cred_list[0], cred_list_2[0])
+
+
+class V2CredentialEc2Controller(unit.TestCase):
+ def setUp(self):
+ super(V2CredentialEc2Controller, self).setUp()
+ self.useFixture(database.Database())
+ self.load_backends()
+ self.load_fixtures(default_fixtures)
+ self.user_id = self.user_foo['id']
+ self.project_id = self.tenant_bar['id']
+ self.controller = controllers.Ec2Controller()
+ self.blob, tmp_ref = unit.new_ec2_credential(
+ user_id=self.user_id,
+ project_id=self.project_id)
+
+ self.creds_ref = (controllers.Ec2Controller
+ ._convert_v3_to_ec2_credential(tmp_ref))
+
+ def test_signature_validate_no_host_port(self):
+ """Test signature validation with the access/secret provided."""
+ access = self.blob['access']
+ secret = self.blob['secret']
+ signer = ec2_utils.Ec2Signer(secret)
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+ request = {'host': 'foo',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ signature = signer.generate(request)
+
+ sig_ref = {'access': access,
+ 'signature': signature,
+ 'host': 'foo',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ # Now validate the signature based on the dummy request
+ self.assertTrue(self.controller.check_signature(self.creds_ref,
+ sig_ref))
+
+ def test_signature_validate_with_host_port(self):
+ """Test signature validation when host is bound with port.
+
+ Host is bound with a port, generally, the port here is not the
+ standard port for the protocol, like '80' for HTTP and port 443
+ for HTTPS, the port is not omitted by the client library.
+ """
+ access = self.blob['access']
+ secret = self.blob['secret']
+ signer = ec2_utils.Ec2Signer(secret)
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+ request = {'host': 'foo:8181',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ signature = signer.generate(request)
+
+ sig_ref = {'access': access,
+ 'signature': signature,
+ 'host': 'foo:8181',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ # Now validate the signature based on the dummy request
+ self.assertTrue(self.controller.check_signature(self.creds_ref,
+ sig_ref))
+
+ def test_signature_validate_with_missed_host_port(self):
+ """Test signature validation when host is bound with well-known port.
+
+ Host is bound with a port, but the port is well-know port like '80'
+ for HTTP and port 443 for HTTPS, sometimes, client library omit
+ the port but then make the request with the port.
+ see (How to create the string to sign): 'http://docs.aws.amazon.com/
+ general/latest/gr/signature-version-2.html'.
+
+ Since "credentials['host']" is not set by client library but is
+ taken from "req.host", so caused the differences.
+ """
+ access = self.blob['access']
+ secret = self.blob['secret']
+ signer = ec2_utils.Ec2Signer(secret)
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+ # Omit the port to generate the signature.
+ cnt_req = {'host': 'foo',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ signature = signer.generate(cnt_req)
+
+ sig_ref = {'access': access,
+ 'signature': signature,
+ 'host': 'foo:8080',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ # Now validate the signature based on the dummy request
+ # Check the signature again after omitting the port.
+ self.assertTrue(self.controller.check_signature(self.creds_ref,
+ sig_ref))
+
+ def test_signature_validate_no_signature(self):
+ """Signature is not presented in signature reference data."""
+ access = self.blob['access']
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+
+ sig_ref = {'access': access,
+ 'signature': None,
+ 'host': 'foo:8080',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ # Now validate the signature based on the dummy request
+ self.assertRaises(exception.Unauthorized,
+ self.controller.check_signature,
+ self.creds_ref, sig_ref)
+
+ def test_signature_validate_invalid_signature(self):
+ """Signature is not signed on the correct data."""
+ access = self.blob['access']
+ secret = self.blob['secret']
+ signer = ec2_utils.Ec2Signer(secret)
+ params = {'SignatureMethod': 'HmacSHA256',
+ 'SignatureVersion': '2',
+ 'AWSAccessKeyId': access}
+ request = {'host': 'bar',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+ signature = signer.generate(request)
+
+ sig_ref = {'access': access,
+ 'signature': signature,
+ 'host': 'foo:8080',
+ 'verb': 'GET',
+ 'path': '/bar',
+ 'params': params}
+
+ # Now validate the signature based on the dummy request
+ self.assertRaises(exception.Unauthorized,
+ self.controller.check_signature,
+ self.creds_ref, sig_ref)
+
+ def test_check_non_admin_user(self):
+ """Checking if user is admin causes uncaught error.
+
+ When checking if a user is an admin, keystone.exception.Unauthorized
+ is raised but not caught if the user is not an admin.
+ """
+ # make a non-admin user
+ context = {'is_admin': False, 'token_id': uuid.uuid4().hex}
+
+ # check if user is admin
+ # no exceptions should be raised
+ self.controller._is_admin(context)
diff --git a/keystone-moon/keystone/tests/unit/test_driver_hints.py b/keystone-moon/keystone/tests/unit/test_driver_hints.py
index c20d2ae7..75d76194 100644
--- a/keystone-moon/keystone/tests/unit/test_driver_hints.py
+++ b/keystone-moon/keystone/tests/unit/test_driver_hints.py
@@ -27,7 +27,7 @@ class ListHintsTests(test.TestCase):
self.assertEqual('t1', filter['name'])
self.assertEqual('data1', filter['value'])
self.assertEqual('equals', filter['comparator'])
- self.assertEqual(False, filter['case_sensitive'])
+ self.assertFalse(filter['case_sensitive'])
hints.filters.remove(filter)
filter_count = 0
diff --git a/keystone-moon/keystone/tests/unit/test_entry_points.py b/keystone-moon/keystone/tests/unit/test_entry_points.py
new file mode 100644
index 00000000..e973e942
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_entry_points.py
@@ -0,0 +1,48 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import stevedore
+from testtools import matchers
+
+from keystone.tests.unit import core as test
+
+
+class TestPasteDeploymentEntryPoints(test.TestCase):
+ def test_entry_point_middleware(self):
+ """Assert that our list of expected middleware is present."""
+ expected_names = [
+ 'admin_token_auth',
+ 'build_auth_context',
+ 'crud_extension',
+ 'cors',
+ 'debug',
+ 'endpoint_filter_extension',
+ 'ec2_extension',
+ 'ec2_extension_v3',
+ 'federation_extension',
+ 'json_body',
+ 'oauth1_extension',
+ 'request_id',
+ 'revoke_extension',
+ 's3_extension',
+ 'simple_cert_extension',
+ 'sizelimit',
+ 'token_auth',
+ 'url_normalize',
+ 'user_crud_extension',
+ ]
+
+ em = stevedore.ExtensionManager('paste.filter_factory')
+
+ actual_names = [extension.name for extension in em]
+
+ self.assertThat(actual_names, matchers.ContainsAll(expected_names))
diff --git a/keystone-moon/keystone/tests/unit/test_exception.py b/keystone-moon/keystone/tests/unit/test_exception.py
index 4d602ccc..25ca2c09 100644
--- a/keystone-moon/keystone/tests/unit/test_exception.py
+++ b/keystone-moon/keystone/tests/unit/test_exception.py
@@ -67,7 +67,7 @@ class ExceptionTestCase(unit.BaseTestCase):
self.assertValidJsonRendering(e)
self.assertIn(target, six.text_type(e))
- def test_403_title(self):
+ def test_forbidden_title(self):
e = exception.Forbidden()
resp = wsgi.render_exception(e)
j = jsonutils.loads(resp.body)
@@ -123,7 +123,7 @@ class UnexpectedExceptionTestCase(ExceptionTestCase):
self.assertNotIn(self.exc_str, six.text_type(e))
def test_unexpected_error_debug(self):
- self.config_fixture.config(debug=True)
+ self.config_fixture.config(debug=True, insecure_debug=True)
e = exception.UnexpectedError(exception=self.exc_str)
self.assertIn(self.exc_str, six.text_type(e))
@@ -131,32 +131,48 @@ class UnexpectedExceptionTestCase(ExceptionTestCase):
self.config_fixture.config(debug=False)
e = UnexpectedExceptionTestCase.SubClassExc(
debug_info=self.exc_str)
- self.assertEqual(exception.UnexpectedError._message_format,
+ self.assertEqual(exception.UnexpectedError.message_format,
six.text_type(e))
def test_unexpected_error_subclass_debug(self):
- self.config_fixture.config(debug=True)
+ self.config_fixture.config(debug=True, insecure_debug=True)
subclass = self.SubClassExc
e = subclass(debug_info=self.exc_str)
expected = subclass.debug_message_format % {'debug_info': self.exc_str}
- translated_amendment = six.text_type(exception.SecurityError.amendment)
self.assertEqual(
- expected + six.text_type(' ') + translated_amendment,
+ '%s %s' % (expected, exception.SecurityError.amendment),
six.text_type(e))
def test_unexpected_error_custom_message_no_debug(self):
self.config_fixture.config(debug=False)
e = exception.UnexpectedError(self.exc_str)
- self.assertEqual(exception.UnexpectedError._message_format,
+ self.assertEqual(exception.UnexpectedError.message_format,
six.text_type(e))
def test_unexpected_error_custom_message_debug(self):
- self.config_fixture.config(debug=True)
+ self.config_fixture.config(debug=True, insecure_debug=True)
e = exception.UnexpectedError(self.exc_str)
- translated_amendment = six.text_type(exception.SecurityError.amendment)
self.assertEqual(
- self.exc_str + six.text_type(' ') + translated_amendment,
+ '%s %s' % (self.exc_str, exception.SecurityError.amendment),
+ six.text_type(e))
+
+ def test_unexpected_error_custom_message_exception_debug(self):
+ self.config_fixture.config(debug=True, insecure_debug=True)
+ orig_e = exception.NotFound(target=uuid.uuid4().hex)
+ e = exception.UnexpectedError(orig_e)
+ self.assertEqual(
+ '%s %s' % (six.text_type(orig_e),
+ exception.SecurityError.amendment),
+ six.text_type(e))
+
+ def test_unexpected_error_custom_message_binary_debug(self):
+ self.config_fixture.config(debug=True, insecure_debug=True)
+ binary_msg = b'something'
+ e = exception.UnexpectedError(binary_msg)
+ self.assertEqual(
+ '%s %s' % (six.text_type(binary_msg),
+ exception.SecurityError.amendment),
six.text_type(e))
@@ -176,7 +192,7 @@ class SecurityErrorTestCase(ExceptionTestCase):
self.assertNotIn(risky_info, six.text_type(e))
def test_unauthorized_exposure_in_debug(self):
- self.config_fixture.config(debug=True)
+ self.config_fixture.config(debug=True, insecure_debug=True)
risky_info = uuid.uuid4().hex
e = exception.Unauthorized(message=risky_info)
@@ -192,7 +208,7 @@ class SecurityErrorTestCase(ExceptionTestCase):
self.assertNotIn(risky_info, six.text_type(e))
def test_forbidden_exposure_in_debug(self):
- self.config_fixture.config(debug=True)
+ self.config_fixture.config(debug=True, insecure_debug=True)
risky_info = uuid.uuid4().hex
e = exception.Forbidden(message=risky_info)
@@ -208,23 +224,45 @@ class SecurityErrorTestCase(ExceptionTestCase):
self.assertValidJsonRendering(e)
self.assertNotIn(risky_info, six.text_type(e))
self.assertIn(action, six.text_type(e))
+ self.assertNotIn(exception.SecurityError.amendment, six.text_type(e))
- e = exception.ForbiddenAction(action=risky_info)
+ e = exception.ForbiddenAction(action=action)
self.assertValidJsonRendering(e)
- self.assertIn(risky_info, six.text_type(e))
+ self.assertIn(action, six.text_type(e))
+ self.assertNotIn(exception.SecurityError.amendment, six.text_type(e))
def test_forbidden_action_exposure_in_debug(self):
- self.config_fixture.config(debug=True)
+ self.config_fixture.config(debug=True, insecure_debug=True)
risky_info = uuid.uuid4().hex
+ action = uuid.uuid4().hex
- e = exception.ForbiddenAction(message=risky_info)
+ e = exception.ForbiddenAction(message=risky_info, action=action)
self.assertValidJsonRendering(e)
self.assertIn(risky_info, six.text_type(e))
+ self.assertIn(exception.SecurityError.amendment, six.text_type(e))
- e = exception.ForbiddenAction(action=risky_info)
+ e = exception.ForbiddenAction(action=action)
self.assertValidJsonRendering(e)
- self.assertIn(risky_info, six.text_type(e))
+ self.assertIn(action, six.text_type(e))
+ self.assertNotIn(exception.SecurityError.amendment, six.text_type(e))
+
+ def test_forbidden_action_no_message(self):
+ # When no custom message is given when the ForbiddenAction (or other
+ # SecurityError subclass) is created the exposed message is the same
+ # whether debug is enabled or not.
+
+ action = uuid.uuid4().hex
+
+ self.config_fixture.config(debug=False)
+ e = exception.ForbiddenAction(action=action)
+ exposed_message = six.text_type(e)
+ self.assertIn(action, exposed_message)
+ self.assertNotIn(exception.SecurityError.amendment, six.text_type(e))
+
+ self.config_fixture.config(debug=True)
+ e = exception.ForbiddenAction(action=action)
+ self.assertEqual(exposed_message, six.text_type(e))
def test_unicode_argument_message(self):
self.config_fixture.config(debug=False)
diff --git a/keystone-moon/keystone/tests/unit/test_hacking_checks.py b/keystone-moon/keystone/tests/unit/test_hacking_checks.py
index 962f5f8a..e279cc7f 100644
--- a/keystone-moon/keystone/tests/unit/test_hacking_checks.py
+++ b/keystone-moon/keystone/tests/unit/test_hacking_checks.py
@@ -86,25 +86,44 @@ class TestAssertingNoneEquality(BaseStyleCheck):
self.assert_has_errors(code, expected_errors=errors)
-class TestCheckForDebugLoggingIssues(BaseStyleCheck):
+class BaseLoggingCheck(BaseStyleCheck):
def get_checker(self):
return checks.CheckForLoggingIssues
+ def get_fixture(self):
+ return hacking_fixtures.HackingLogging()
+
+ def assert_has_errors(self, code, expected_errors=None):
+
+ # pull out the parts of the error that we'll match against
+ actual_errors = (e[:3] for e in self.run_check(code))
+ # adjust line numbers to make the fixture data more readable.
+ import_lines = len(self.code_ex.shared_imports.split('\n')) - 1
+ actual_errors = [(e[0] - import_lines, e[1], e[2])
+ for e in actual_errors]
+ self.assertEqual(expected_errors or [], actual_errors)
+
+
+class TestCheckForDebugLoggingIssues(BaseLoggingCheck):
+
def test_for_translations(self):
fixture = self.code_ex.assert_no_translations_for_debug_logging
- code = fixture['code']
+ code = self.code_ex.shared_imports + fixture['code']
errors = fixture['expected_errors']
self.assert_has_errors(code, expected_errors=errors)
-class TestCheckForNonDebugLoggingIssues(BaseStyleCheck):
+class TestLoggingWithWarn(BaseLoggingCheck):
- def get_checker(self):
- return checks.CheckForLoggingIssues
+ def test(self):
+ data = self.code_ex.assert_not_using_deprecated_warn
+ code = self.code_ex.shared_imports + data['code']
+ errors = data['expected_errors']
+ self.assert_has_errors(code, expected_errors=errors)
- def get_fixture(self):
- return hacking_fixtures.HackingLogging()
+
+class TestCheckForNonDebugLoggingIssues(BaseLoggingCheck):
def test_for_translations(self):
for example in self.code_ex.examples:
@@ -112,15 +131,6 @@ class TestCheckForNonDebugLoggingIssues(BaseStyleCheck):
errors = example['expected_errors']
self.assert_has_errors(code, expected_errors=errors)
- def assert_has_errors(self, code, expected_errors=None):
- # pull out the parts of the error that we'll match against
- actual_errors = (e[:3] for e in self.run_check(code))
- # adjust line numbers to make the fixure data more readable.
- import_lines = len(self.code_ex.shared_imports.split('\n')) - 1
- actual_errors = [(e[0] - import_lines, e[1], e[2])
- for e in actual_errors]
- self.assertEqual(expected_errors or [], actual_errors)
-
class TestDictConstructorWithSequenceCopy(BaseStyleCheck):
diff --git a/keystone-moon/keystone/tests/unit/test_kvs.py b/keystone-moon/keystone/tests/unit/test_kvs.py
index 18931f5d..a88ee1ac 100644
--- a/keystone-moon/keystone/tests/unit/test_kvs.py
+++ b/keystone-moon/keystone/tests/unit/test_kvs.py
@@ -17,7 +17,6 @@ import uuid
from dogpile.cache import api
from dogpile.cache import proxy
-from dogpile.cache import util
import mock
import six
from testtools import matchers
@@ -86,9 +85,12 @@ class RegionProxy2Fixture(proxy.ProxyBackend):
class TestMemcacheDriver(api.CacheBackend):
- """A test dogpile.cache backend that conforms to the mixin-mechanism for
+ """A test dogpile.cache backend.
+
+ This test backend conforms to the mixin-mechanism for
overriding set and set_multi methods on dogpile memcached drivers.
"""
+
class test_client(object):
# FIXME(morganfainberg): Convert this test client over to using mock
# and/or mock.MagicMock as appropriate
@@ -203,10 +205,10 @@ class KVSTest(unit.TestCase):
kvs = self._get_kvs_region()
kvs.configure('openstack.kvs.Memory')
- self.assertIs(kvs._region.key_mangler, util.sha1_mangle_key)
+ self.assertIs(kvs._region.key_mangler, core.sha1_mangle_key)
# The backend should also have the keymangler set the same as the
# region now.
- self.assertIs(kvs._region.backend.key_mangler, util.sha1_mangle_key)
+ self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key)
def test_kvs_key_mangler_configuration_backend(self):
kvs = self._get_kvs_region()
@@ -217,7 +219,7 @@ class KVSTest(unit.TestCase):
def test_kvs_key_mangler_configuration_forced_backend(self):
kvs = self._get_kvs_region()
kvs.configure('openstack.kvs.KVSBackendForcedKeyMangleFixture',
- key_mangler=util.sha1_mangle_key)
+ key_mangler=core.sha1_mangle_key)
expected = KVSBackendForcedKeyMangleFixture.key_mangler(self.key_foo)
self.assertEqual(expected, kvs._region.key_mangler(self.key_foo))
@@ -236,7 +238,7 @@ class KVSTest(unit.TestCase):
kvs = self._get_kvs_region()
kvs.configure('openstack.kvs.Memory')
- self.assertIs(kvs._region.backend.key_mangler, util.sha1_mangle_key)
+ self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key)
kvs._set_key_mangler(test_key_mangler)
self.assertIs(kvs._region.backend.key_mangler, test_key_mangler)
@@ -432,7 +434,7 @@ class KVSTest(unit.TestCase):
no_expiry_keys=no_expiry_keys)
calculated_keys = set([kvs._region.key_mangler(key)
for key in no_expiry_keys])
- self.assertIs(kvs._region.backend.key_mangler, util.sha1_mangle_key)
+ self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key)
self.assertSetEqual(calculated_keys,
kvs._region.backend.no_expiry_hashed_keys)
self.assertSetEqual(no_expiry_keys,
@@ -450,7 +452,7 @@ class KVSTest(unit.TestCase):
kvs.configure('openstack.kvs.Memcached',
memcached_backend='TestDriver',
no_expiry_keys=no_expiry_keys)
- self.assertIs(kvs._region.backend.key_mangler, util.sha1_mangle_key)
+ self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key)
kvs._region.backend.key_mangler = None
self.assertSetEqual(kvs._region.backend.raw_no_expiry_keys,
kvs._region.backend.no_expiry_hashed_keys)
@@ -492,15 +494,15 @@ class KVSTest(unit.TestCase):
# Ensure the set_arguments are correct
self.assertDictEqual(
- kvs._region.backend._get_set_arguments_driver_attr(),
- expected_set_args)
+ expected_set_args,
+ kvs._region.backend._get_set_arguments_driver_attr())
# Set a key that would have an expiry and verify the correct result
# occurred and that the correct set_arguments were passed.
kvs.set(self.key_foo, self.value_foo)
self.assertDictEqual(
- kvs._region.backend.driver.client.set_arguments_passed,
- expected_set_args)
+ expected_set_args,
+ kvs._region.backend.driver.client.set_arguments_passed)
observed_foo_keys = list(kvs_driver.client.keys_values.keys())
self.assertEqual(expected_foo_keys, observed_foo_keys)
self.assertEqual(
@@ -511,8 +513,8 @@ class KVSTest(unit.TestCase):
# occurred and that the correct set_arguments were passed.
kvs.set(self.key_bar, self.value_bar)
self.assertDictEqual(
- kvs._region.backend.driver.client.set_arguments_passed,
- expected_no_expiry_args)
+ expected_no_expiry_args,
+ kvs._region.backend.driver.client.set_arguments_passed)
observed_bar_keys = list(kvs_driver.client.keys_values.keys())
self.assertEqual(expected_bar_keys, observed_bar_keys)
self.assertEqual(
@@ -523,8 +525,8 @@ class KVSTest(unit.TestCase):
# result occurred and that the correct set_arguments were passed.
kvs.set_multi(mapping_foo)
self.assertDictEqual(
- kvs._region.backend.driver.client.set_arguments_passed,
- expected_set_args)
+ expected_set_args,
+ kvs._region.backend.driver.client.set_arguments_passed)
observed_foo_keys = list(kvs_driver.client.keys_values.keys())
self.assertEqual(expected_foo_keys, observed_foo_keys)
self.assertEqual(
@@ -535,8 +537,8 @@ class KVSTest(unit.TestCase):
# result occurred and that the correct set_arguments were passed.
kvs.set_multi(mapping_bar)
self.assertDictEqual(
- kvs._region.backend.driver.client.set_arguments_passed,
- expected_no_expiry_args)
+ expected_no_expiry_args,
+ kvs._region.backend.driver.client.set_arguments_passed)
observed_bar_keys = list(kvs_driver.client.keys_values.keys())
self.assertEqual(expected_bar_keys, observed_bar_keys)
self.assertEqual(
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
index e2abd56d..4bce6a73 100644
--- a/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
+++ b/keystone-moon/keystone/tests/unit/test_ldap_livetest.py
@@ -69,9 +69,6 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
create_object(CONF.ldap.role_tree_dn,
{'objectclass': 'organizationalUnit',
'ou': 'Roles'})
- create_object(CONF.ldap.project_tree_dn,
- {'objectclass': 'organizationalUnit',
- 'ou': 'Projects'})
create_object(CONF.ldap.group_tree_dn,
{'objectclass': 'organizationalUnit',
'ou': 'UserGroups'})
@@ -82,8 +79,7 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
return config_files
def test_build_tree(self):
- """Regression test for building the tree names
- """
+ """Regression test for building the tree names."""
# logic is different from the fake backend.
user_api = identity_ldap.UserApi(CONF)
self.assertTrue(user_api)
@@ -134,6 +130,7 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
USER_COUNT = 2
for x in range(0, USER_COUNT):
+ # TODO(shaleh): use unit.new_user_ref()
new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
'enabled': True, 'domain_id': domain['id']}
new_user = self.identity_api.create_user(new_user)
@@ -147,8 +144,7 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity):
self.assertEqual(0, len(group_refs))
for x in range(0, GROUP_COUNT):
- new_group = {'domain_id': domain['id'],
- 'name': uuid.uuid4().hex}
+ new_group = unit.new_group_ref(domain_id=domain['id'])
new_group = self.identity_api.create_group(new_group)
test_groups.append(new_group)
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
index 81e91ce5..a284114a 100644
--- a/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
+++ b/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py
@@ -105,6 +105,7 @@ class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin,
password=old_password)
def _create_user_and_authenticate(self, password):
+ # TODO(shaleh): port to new_user_ref()
user_dict = {
'domain_id': CONF.identity.default_domain_id,
'name': uuid.uuid4().hex,
@@ -183,7 +184,7 @@ class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin,
user_ref = self.identity_api.authenticate(
context={}, user_id=user['id'], password=old_password)
- self.assertDictEqual(user_ref, user)
+ self.assertDictEqual(user, user_ref)
def test_password_change_with_auth_pool_enabled_no_lifetime(self):
self.config_fixture.config(group='ldap',
diff --git a/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
index 6b47bfd9..98e2882d 100644
--- a/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
+++ b/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py
@@ -50,6 +50,7 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity):
tls_req_cert='demand')
self.identity_api = identity.backends.ldap.Identity()
+ # TODO(shaleh): use new_user_ref()
user = {'name': 'fake1',
'password': 'fakepass1',
'tenants': ['bar']}
@@ -71,6 +72,7 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity):
tls_req_cert='demand')
self.identity_api = identity.backends.ldap.Identity()
+ # TODO(shaleh): use new_user_ref()
user = {'id': 'fake1',
'name': 'fake1',
'password': 'fakepass1',
@@ -95,6 +97,7 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity):
tls_cacertdir=None)
self.identity_api = identity.backends.ldap.Identity()
+ # TODO(shaleh): use new_user_ref()
user = {'name': 'fake1',
'password': 'fakepass1',
'tenants': ['bar']}
@@ -109,6 +112,7 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity):
tls_cacertdir='/etc/keystone/ssl/mythicalcertdir')
self.identity_api = identity.backends.ldap.Identity()
+ # TODO(shaleh): use new_user_ref()
user = {'name': 'fake1',
'password': 'fakepass1',
'tenants': ['bar']}
diff --git a/keystone-moon/keystone/tests/unit/test_middleware.py b/keystone-moon/keystone/tests/unit/test_middleware.py
index 0eedb9c6..d33e8c00 100644
--- a/keystone-moon/keystone/tests/unit/test_middleware.py
+++ b/keystone-moon/keystone/tests/unit/test_middleware.py
@@ -12,17 +12,18 @@
# License for the specific language governing permissions and limitations
# under the License.
+import copy
import hashlib
import uuid
from oslo_config import cfg
from six.moves import http_client
-import webob
+import webtest
from keystone.common import authorization
from keystone.common import tokenless_auth
-from keystone.contrib.federation import constants as federation_constants
from keystone import exception
+from keystone.federation import constants as federation_constants
from keystone import middleware
from keystone.tests import unit
from keystone.tests.unit import mapping_fixtures
@@ -32,104 +33,158 @@ from keystone.tests.unit import test_backend_sql
CONF = cfg.CONF
-def make_request(**kwargs):
- accept = kwargs.pop('accept', None)
- method = kwargs.pop('method', 'GET')
- body = kwargs.pop('body', None)
- req = webob.Request.blank('/', **kwargs)
- req.method = method
- if body is not None:
- req.body = body
- if accept is not None:
- req.accept = accept
- return req
+class MiddlewareRequestTestBase(unit.TestCase):
+ MIDDLEWARE_CLASS = None # override this in subclasses
-def make_response(**kwargs):
- body = kwargs.pop('body', None)
- return webob.Response(body)
+ def _application(self):
+ """A base wsgi application that returns a simple response."""
+ def app(environ, start_response):
+ # WSGI requires the body of the response to be six.binary_type
+ body = uuid.uuid4().hex.encode('utf-8')
+ resp_headers = [('Content-Type', 'text/html; charset=utf8'),
+ ('Content-Length', str(len(body)))]
+ start_response('200 OK', resp_headers)
+ return [body]
+ return app
+
+ def _generate_app_response(self, app, headers=None, method='get',
+ path='/', **kwargs):
+ """Given a wsgi application wrap it in webtest and call it."""
+ return getattr(webtest.TestApp(app), method)(path,
+ headers=headers or {},
+ **kwargs)
+
+ def _middleware_failure(self, exc, *args, **kwargs):
+ """Assert that an exception is being thrown from process_request."""
+ # NOTE(jamielennox): This is a little ugly. We need to call the webtest
+ # framework so that the correct RequestClass object is created for when
+ # we call process_request. However because we go via webtest we only
+ # see the response object and not the actual exception that is thrown
+ # by process_request. To get around this we subclass process_request
+ # with something that checks for the right type of exception being
+ # thrown so we can test the middle of the request process.
+ # TODO(jamielennox): Change these tests to test the value of the
+ # response rather than the error that is raised.
+
+ class _Failing(self.MIDDLEWARE_CLASS):
+
+ _called = False
+
+ def process_request(i_self, *i_args, **i_kwargs):
+ # i_ to distinguish it from and not clobber the outer vars
+ e = self.assertRaises(exc,
+ super(_Failing, i_self).process_request,
+ *i_args, **i_kwargs)
+ i_self._called = True
+ raise e
+
+ # by default the returned status when an uncaught exception is raised
+ # for validation or caught errors this will likely be 400
+ kwargs.setdefault('status', http_client.INTERNAL_SERVER_ERROR) # 500
+
+ app = _Failing(self._application())
+ resp = self._generate_app_response(app, *args, **kwargs)
+ self.assertTrue(app._called)
+ return resp
+
+ def _do_middleware_response(self, *args, **kwargs):
+ """Wrap a middleware around a sample application and call it."""
+ app = self.MIDDLEWARE_CLASS(self._application())
+ return self._generate_app_response(app, *args, **kwargs)
+
+ def _do_middleware_request(self, *args, **kwargs):
+ """The request object from a successful middleware call."""
+ return self._do_middleware_response(*args, **kwargs).request
+
+
+class TokenAuthMiddlewareTest(MiddlewareRequestTestBase):
+
+ MIDDLEWARE_CLASS = middleware.TokenAuthMiddleware
-class TokenAuthMiddlewareTest(unit.TestCase):
def test_request(self):
- req = make_request()
- req.headers[middleware.AUTH_TOKEN_HEADER] = 'MAGIC'
- middleware.TokenAuthMiddleware(None).process_request(req)
+ headers = {middleware.AUTH_TOKEN_HEADER: 'MAGIC'}
+ req = self._do_middleware_request(headers=headers)
context = req.environ[middleware.CONTEXT_ENV]
self.assertEqual('MAGIC', context['token_id'])
-class AdminTokenAuthMiddlewareTest(unit.TestCase):
+class AdminTokenAuthMiddlewareTest(MiddlewareRequestTestBase):
+
+ MIDDLEWARE_CLASS = middleware.AdminTokenAuthMiddleware
+
+ def config_overrides(self):
+ super(AdminTokenAuthMiddlewareTest, self).config_overrides()
+ self.config_fixture.config(
+ admin_token='ADMIN')
+
def test_request_admin(self):
- req = make_request()
- req.headers[middleware.AUTH_TOKEN_HEADER] = CONF.admin_token
- middleware.AdminTokenAuthMiddleware(None).process_request(req)
- context = req.environ[middleware.CONTEXT_ENV]
- self.assertTrue(context['is_admin'])
+ headers = {middleware.AUTH_TOKEN_HEADER: 'ADMIN'}
+ req = self._do_middleware_request(headers=headers)
+ self.assertTrue(req.environ[middleware.CONTEXT_ENV]['is_admin'])
def test_request_non_admin(self):
- req = make_request()
- req.headers[middleware.AUTH_TOKEN_HEADER] = 'NOT-ADMIN'
- middleware.AdminTokenAuthMiddleware(None).process_request(req)
- context = req.environ[middleware.CONTEXT_ENV]
- self.assertFalse(context['is_admin'])
+ headers = {middleware.AUTH_TOKEN_HEADER: 'NOT-ADMIN'}
+ req = self._do_middleware_request(headers=headers)
+ self.assertFalse(req.environ[middleware.CONTEXT_ENV]['is_admin'])
-class PostParamsMiddlewareTest(unit.TestCase):
- def test_request_with_params(self):
- req = make_request(body="arg1=one", method='POST')
- middleware.PostParamsMiddleware(None).process_request(req)
- params = req.environ[middleware.PARAMS_ENV]
- self.assertEqual({"arg1": "one"}, params)
+class JsonBodyMiddlewareTest(MiddlewareRequestTestBase):
+ MIDDLEWARE_CLASS = middleware.JsonBodyMiddleware
-class JsonBodyMiddlewareTest(unit.TestCase):
def test_request_with_params(self):
- req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
- content_type='application/json',
- method='POST')
- middleware.JsonBodyMiddleware(None).process_request(req)
- params = req.environ[middleware.PARAMS_ENV]
- self.assertEqual({"arg1": "one", "arg2": ["a"]}, params)
+ headers = {'Content-Type': 'application/json'}
+ params = '{"arg1": "one", "arg2": ["a"]}'
+ req = self._do_middleware_request(params=params,
+ headers=headers,
+ method='post')
+ self.assertEqual({"arg1": "one", "arg2": ["a"]},
+ req.environ[middleware.PARAMS_ENV])
def test_malformed_json(self):
- req = make_request(body='{"arg1": "on',
- content_type='application/json',
- method='POST')
- resp = middleware.JsonBodyMiddleware(None).process_request(req)
- self.assertEqual(http_client.BAD_REQUEST, resp.status_int)
+ headers = {'Content-Type': 'application/json'}
+ self._do_middleware_response(params='{"arg1": "on',
+ headers=headers,
+ method='post',
+ status=http_client.BAD_REQUEST)
def test_not_dict_body(self):
- req = make_request(body='42',
- content_type='application/json',
- method='POST')
- resp = middleware.JsonBodyMiddleware(None).process_request(req)
- self.assertEqual(http_client.BAD_REQUEST, resp.status_int)
- self.assertTrue('valid JSON object' in resp.json['error']['message'])
+ headers = {'Content-Type': 'application/json'}
+ resp = self._do_middleware_response(params='42',
+ headers=headers,
+ method='post',
+ status=http_client.BAD_REQUEST)
+
+ self.assertIn('valid JSON object', resp.json['error']['message'])
def test_no_content_type(self):
- req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
- method='POST')
- middleware.JsonBodyMiddleware(None).process_request(req)
- params = req.environ[middleware.PARAMS_ENV]
- self.assertEqual({"arg1": "one", "arg2": ["a"]}, params)
+ headers = {'Content-Type': ''}
+ params = '{"arg1": "one", "arg2": ["a"]}'
+ req = self._do_middleware_request(params=params,
+ headers=headers,
+ method='post')
+ self.assertEqual({"arg1": "one", "arg2": ["a"]},
+ req.environ[middleware.PARAMS_ENV])
def test_unrecognized_content_type(self):
- req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
- content_type='text/plain',
- method='POST')
- resp = middleware.JsonBodyMiddleware(None).process_request(req)
- self.assertEqual(http_client.BAD_REQUEST, resp.status_int)
+ headers = {'Content-Type': 'text/plain'}
+ self._do_middleware_response(params='{"arg1": "one", "arg2": ["a"]}',
+ headers=headers,
+ method='post',
+ status=http_client.BAD_REQUEST)
def test_unrecognized_content_type_without_body(self):
- req = make_request(content_type='text/plain',
- method='GET')
- middleware.JsonBodyMiddleware(None).process_request(req)
- params = req.environ.get(middleware.PARAMS_ENV, {})
- self.assertEqual({}, params)
+ headers = {'Content-Type': 'text/plain'}
+ req = self._do_middleware_request(headers=headers)
+ self.assertEqual({}, req.environ.get(middleware.PARAMS_ENV, {}))
+
+class AuthContextMiddlewareTest(test_backend_sql.SqlTests,
+ MiddlewareRequestTestBase):
-class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
+ MIDDLEWARE_CLASS = middleware.AuthContextMiddleware
def setUp(self):
super(AuthContextMiddlewareTest, self).setUp()
@@ -139,55 +194,32 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
self.config_fixture.config(group='tokenless_auth',
trusted_issuer=[self.trusted_issuer])
- # This idp_id is calculated based on
- # sha256(self.client_issuer)
- hashed_idp = hashlib.sha256(self.client_issuer)
+ # client_issuer is encoded because you can't hash
+ # unicode objects with hashlib.
+ # This idp_id is calculated based on sha256(self.client_issuer)
+ hashed_idp = hashlib.sha256(self.client_issuer.encode('utf-8'))
self.idp_id = hashed_idp.hexdigest()
self._load_sample_data()
def _load_sample_data(self):
- self.domain_id = uuid.uuid4().hex
- self.domain_name = uuid.uuid4().hex
- self.project_id = uuid.uuid4().hex
- self.project_name = uuid.uuid4().hex
- self.user_name = uuid.uuid4().hex
- self.user_password = uuid.uuid4().hex
- self.user_email = uuid.uuid4().hex
self.protocol_id = 'x509'
- self.role_id = uuid.uuid4().hex
- self.role_name = uuid.uuid4().hex
- # for ephemeral user
- self.group_name = uuid.uuid4().hex
# 1) Create a domain for the user.
- self.domain = {
- 'description': uuid.uuid4().hex,
- 'enabled': True,
- 'id': self.domain_id,
- 'name': self.domain_name,
- }
-
+ self.domain = unit.new_domain_ref()
+ self.domain_id = self.domain['id']
+ self.domain_name = self.domain['name']
self.resource_api.create_domain(self.domain_id, self.domain)
# 2) Create a project for the user.
- self.project = {
- 'description': uuid.uuid4().hex,
- 'domain_id': self.domain_id,
- 'enabled': True,
- 'id': self.project_id,
- 'name': self.project_name,
- }
+ self.project = unit.new_project_ref(domain_id=self.domain_id)
+ self.project_id = self.project['id']
+ self.project_name = self.project['name']
self.resource_api.create_project(self.project_id, self.project)
# 3) Create a user in new domain.
- self.user = {
- 'name': self.user_name,
- 'domain_id': self.domain_id,
- 'project_id': self.project_id,
- 'password': self.user_password,
- 'email': self.user_email,
- }
+ self.user = unit.new_user_ref(domain_id=self.domain_id,
+ project_id=self.project_id)
self.user = self.identity_api.create_user(self.user)
@@ -197,17 +229,13 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
self.idp)
# Add a role
- self.role = {
- 'id': self.role_id,
- 'name': self.role_name,
- }
+ self.role = unit.new_role_ref()
+ self.role_id = self.role['id']
+ self.role_name = self.role['name']
self.role_api.create_role(self.role_id, self.role)
# Add a group
- self.group = {
- 'name': self.group_name,
- 'domain_id': self.domain_id,
- }
+ self.group = unit.new_group_ref(domain_id=self.domain_id)
self.group = self.identity_api.create_group(self.group)
# Assign a role to the user on a project
@@ -282,7 +310,7 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
:param request: HTTP request
:param mapping_ref: A mapping in JSON structure will be setup in the
- backend DB for mapping an user or a group.
+ backend DB for mapping a user or a group.
:param exception_expected: Sets to True when an exception is expected
to raised based on the given arguments.
:returns: context an auth context contains user and role information
@@ -300,30 +328,27 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
return context
def test_context_already_exists(self):
- req = make_request()
- token_id = uuid.uuid4().hex
- req.environ[authorization.AUTH_CONTEXT_ENV] = {'token_id': token_id}
- context = self._create_context(request=req)
- self.assertEqual(token_id, context['token_id'])
+ stub_value = uuid.uuid4().hex
+ env = {authorization.AUTH_CONTEXT_ENV: stub_value}
+ req = self._do_middleware_request(extra_environ=env)
+ self.assertEqual(stub_value,
+ req.environ.get(authorization.AUTH_CONTEXT_ENV))
def test_not_applicable_to_token_request(self):
- env = {}
- env['PATH_INFO'] = '/auth/tokens'
- env['REQUEST_METHOD'] = 'POST'
- req = make_request(environ=env)
- context = self._create_context(request=req)
+ req = self._do_middleware_request(path='/auth/tokens', method='post')
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self.assertIsNone(context)
def test_no_tokenless_attributes_request(self):
- req = make_request()
- context = self._create_context(request=req)
+ req = self._do_middleware_request()
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self.assertIsNone(context)
def test_no_issuer_attribute_request(self):
env = {}
env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex
- req = make_request(environ=env)
- context = self._create_context(request=req)
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self.assertIsNone(context)
def test_has_only_issuer_and_project_name_request(self):
@@ -332,61 +357,51 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
# references to issuer of the client certificate.
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex
- req = make_request(environ=env)
- context = self._create_context(request=req,
- exception_expected=True)
- self.assertRaises(exception.ValidationError,
- context.process_request,
- req)
+ self._middleware_failure(exception.ValidationError,
+ extra_environ=env,
+ status=400)
def test_has_only_issuer_and_project_domain_name_request(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex
- req = make_request(environ=env)
- context = self._create_context(request=req,
- exception_expected=True)
- self.assertRaises(exception.ValidationError,
- context.process_request,
- req)
+ self._middleware_failure(exception.ValidationError,
+ extra_environ=env,
+ status=400)
def test_has_only_issuer_and_project_domain_id_request(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_DOMAIN_ID'] = uuid.uuid4().hex
- req = make_request(environ=env)
- context = self._create_context(request=req,
- exception_expected=True)
- self.assertRaises(exception.ValidationError,
- context.process_request,
- req)
+ self._middleware_failure(exception.ValidationError,
+ extra_environ=env,
+ status=400)
def test_missing_both_domain_and_project_request(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
- req = make_request(environ=env)
- context = self._create_context(request=req,
- exception_expected=True)
- self.assertRaises(exception.ValidationError,
- context.process_request,
- req)
+ self._middleware_failure(exception.ValidationError,
+ extra_environ=env,
+ status=400)
def test_empty_trusted_issuer_list(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex
- req = make_request(environ=env)
+
self.config_fixture.config(group='tokenless_auth',
trusted_issuer=[])
- context = self._create_context(request=req)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self.assertIsNone(context)
def test_client_issuer_not_trusted(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.untrusted_client_issuer
env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex
- req = make_request(environ=env)
- context = self._create_context(request=req)
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self.assertIsNone(context)
def test_proj_scope_with_proj_id_and_proj_dom_id_success(self):
@@ -397,24 +412,28 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
# SSL_CLIENT_USER_NAME and SSL_CLIENT_DOMAIN_NAME are the types
# defined in the mapping that will map to the user name and
# domain name
- env['SSL_CLIENT_USER_NAME'] = self.user_name
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context)
def test_proj_scope_with_proj_id_only_success(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_ID'] = self.project_id
- env['SSL_CLIENT_USER_NAME'] = self.user_name
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context)
def test_proj_scope_with_proj_name_and_proj_dom_id_success(self):
@@ -422,12 +441,14 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
- env['SSL_CLIENT_USER_NAME'] = self.user_name
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context)
def test_proj_scope_with_proj_name_and_proj_dom_name_success(self):
@@ -435,28 +456,29 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user_name
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context)
def test_proj_scope_with_proj_name_only_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_id
- env['SSL_CLIENT_USER_NAME'] = self.user_name
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME,
- exception_expected=True)
- self.assertRaises(exception.ValidationError,
- context.process_request,
- req)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ self._middleware_failure(exception.ValidationError,
+ extra_environ=env,
+ status=400)
def test_mapping_with_userid_and_domainid_success(self):
env = {}
@@ -465,10 +487,12 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_ID'] = self.user['id']
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINID)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINID)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context)
def test_mapping_with_userid_and_domainname_success(self):
@@ -478,10 +502,12 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_ID'] = self.user['id']
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINNAME)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINNAME)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context)
def test_mapping_with_username_and_domainid_success(self):
@@ -489,12 +515,14 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user_name
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context)
def test_only_domain_name_fail(self):
@@ -503,14 +531,13 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['HTTP_X_PROJECT_ID'] = self.project_id
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_DOMAINNAME_ONLY,
- exception_expected=True)
- self.assertRaises(exception.ValidationError,
- context.process_request,
- req)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_DOMAINNAME_ONLY)
+
+ self._middleware_failure(exception.ValidationError,
+ extra_environ=env,
+ status=400)
def test_only_domain_id_fail(self):
env = {}
@@ -518,29 +545,27 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['HTTP_X_PROJECT_ID'] = self.project_id
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_DOMAINID_ONLY,
- exception_expected=True)
- self.assertRaises(exception.ValidationError,
- context.process_request,
- req)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_DOMAINID_ONLY)
+
+ self._middleware_failure(exception.ValidationError,
+ extra_environ=env,
+ status=400)
def test_missing_domain_data_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_ID'] = self.project_id
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
- env['SSL_CLIENT_USER_NAME'] = self.user_name
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_ONLY,
- exception_expected=True)
- self.assertRaises(exception.ValidationError,
- context.process_request,
- req)
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_ONLY)
+
+ self._middleware_failure(exception.ValidationError,
+ extra_environ=env,
+ status=400)
def test_userid_success(self):
env = {}
@@ -548,10 +573,10 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['HTTP_X_PROJECT_ID'] = self.project_id
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
env['SSL_CLIENT_USER_ID'] = self.user['id']
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERID_ONLY)
+
+ self._load_mapping_rules(mapping_fixtures.MAPPING_WITH_USERID_ONLY)
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context)
def test_domain_disable_fail(self):
@@ -559,37 +584,35 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user_name
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
- req = make_request(environ=env)
+
self.domain['enabled'] = False
self.domain = self.resource_api.update_domain(
self.domain['id'], self.domain)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID,
- exception_expected=True)
- self.assertRaises(exception.Unauthorized,
- context.process_request,
- req)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID)
+ self._middleware_failure(exception.Unauthorized,
+ extra_environ=env,
+ status=401)
def test_user_disable_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user_name
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
- req = make_request(environ=env)
+
self.user['enabled'] = False
self.user = self.identity_api.update_user(self.user['id'], self.user)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID,
- exception_expected=True)
- self.assertRaises(AssertionError,
- context.process_request,
- req)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID)
+
+ self._middleware_failure(AssertionError,
+ extra_environ=env)
def test_invalid_user_fail(self):
env = {}
@@ -598,30 +621,29 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
env['SSL_CLIENT_USER_NAME'] = uuid.uuid4().hex
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
- req = make_request(environ=env)
- context = self._create_context(
- request=req,
- mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME,
- exception_expected=True)
- self.assertRaises(exception.UserNotFound,
- context.process_request,
- req)
+
+ self._load_mapping_rules(
+ mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
+
+ self._middleware_failure(exception.UserNotFound,
+ extra_environ=env,
+ status=404)
def test_ephemeral_success(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user_name
- req = make_request(environ=env)
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
- mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
+ mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
- context = self._create_context(
- request=req,
- mapping_ref=mapping)
+ self._load_mapping_rules(mapping)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context, ephemeral_user=True)
def test_ephemeral_with_default_user_type_success(self):
@@ -629,23 +651,25 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user_name
- req = make_request(environ=env)
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
# this mapping does not have the user type defined
# and it should defaults to 'ephemeral' which is
# the expected type for the test case.
- mapping = mapping_fixtures.MAPPING_FOR_DEFAULT_EPHEMERAL_USER.copy()
+ mapping = copy.deepcopy(
+ mapping_fixtures.MAPPING_FOR_DEFAULT_EPHEMERAL_USER)
mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
- context = self._create_context(
- request=req,
- mapping_ref=mapping)
+ self._load_mapping_rules(mapping)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context, ephemeral_user=True)
def test_ephemeral_any_user_success(self):
- """Ephemeral user does not need a specified user
+ """Verify ephemeral user does not need a specified user.
+
Keystone is not looking to match the user, but a corresponding group.
"""
env = {}
@@ -653,15 +677,15 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_NAME'] = uuid.uuid4().hex
- req = make_request(environ=env)
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
- mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
+ mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
- context = self._create_context(
- request=req,
- mapping_ref=mapping)
+ self._load_mapping_rules(mapping)
+
+ req = self._do_middleware_request(extra_environ=env)
+ context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
self._assert_tokenless_auth_context(context, ephemeral_user=True)
def test_ephemeral_invalid_scope_fail(self):
@@ -669,43 +693,37 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex
env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex
- env['SSL_CLIENT_USER_NAME'] = self.user_name
- req = make_request(environ=env)
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
- mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
+ mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
- context = self._create_context(
- request=req,
- mapping_ref=mapping,
- exception_expected=True)
- self.assertRaises(exception.Unauthorized,
- context.process_request,
- req)
+ self._load_mapping_rules(mapping)
+
+ self._middleware_failure(exception.Unauthorized,
+ extra_environ=env,
+ status=401)
def test_ephemeral_no_group_found_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user_name
- req = make_request(environ=env)
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
- mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
+ mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex
- context = self._create_context(
- request=req,
- mapping_ref=mapping,
- exception_expected=True)
- self.assertRaises(exception.MappedGroupNotFound,
- context.process_request,
- req)
+ self._load_mapping_rules(mapping)
+
+ self._middleware_failure(exception.MappedGroupNotFound,
+ extra_environ=env)
def test_ephemeral_incorrect_mapping_fail(self):
- """Ephemeral user picks up the non-ephemeral user mapping.
+ """Test ephemeral user picking up the non-ephemeral user mapping.
+
Looking up the mapping with protocol Id 'x509' will load up
the non-ephemeral user mapping, results unauthenticated.
"""
@@ -713,21 +731,17 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
- env['SSL_CLIENT_USER_NAME'] = self.user_name
- req = make_request(environ=env)
+ env['SSL_CLIENT_USER_NAME'] = self.user['name']
# This will pick up the incorrect mapping
self.config_fixture.config(group='tokenless_auth',
protocol='x509')
self.protocol_id = 'x509'
- mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
+ mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER)
mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex
- context = self._create_context(
- request=req,
- mapping_ref=mapping,
- exception_expected=True)
- self.assertRaises(exception.MappedGroupNotFound,
- context.process_request,
- req)
+ self._load_mapping_rules(mapping)
+
+ self._middleware_failure(exception.MappedGroupNotFound,
+ extra_environ=env)
def test_create_idp_id_success(self):
env = {}
diff --git a/keystone-moon/keystone/tests/unit/test_policy.py b/keystone-moon/keystone/tests/unit/test_policy.py
index 686e2b70..d6e911e9 100644
--- a/keystone-moon/keystone/tests/unit/test_policy.py
+++ b/keystone-moon/keystone/tests/unit/test_policy.py
@@ -23,22 +23,11 @@ from testtools import matchers
from keystone import exception
from keystone.policy.backends import rules
from keystone.tests import unit
+from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import temporaryfile
-class BasePolicyTestCase(unit.TestCase):
- def setUp(self):
- super(BasePolicyTestCase, self).setUp()
- rules.reset()
- self.addCleanup(rules.reset)
- self.addCleanup(self.clear_cache_safely)
-
- def clear_cache_safely(self):
- if rules._ENFORCER:
- rules._ENFORCER.clear()
-
-
-class PolicyFileTestCase(BasePolicyTestCase):
+class PolicyFileTestCase(unit.TestCase):
def setUp(self):
# self.tmpfilename should exist before setUp super is called
# this is to ensure it is available for the config_fixture in
@@ -48,10 +37,8 @@ class PolicyFileTestCase(BasePolicyTestCase):
super(PolicyFileTestCase, self).setUp()
self.target = {}
- def config_overrides(self):
- super(PolicyFileTestCase, self).config_overrides()
- self.config_fixture.config(group='oslo_policy',
- policy_file=self.tmpfilename)
+ def _policy_fixture(self):
+ return ksfixtures.Policy(self.tmpfilename, self.config_fixture)
def test_modified_policy_reloads(self):
action = "example:test"
@@ -65,21 +52,10 @@ class PolicyFileTestCase(BasePolicyTestCase):
self.assertRaises(exception.ForbiddenAction, rules.enforce,
empty_credentials, action, self.target)
- def test_invalid_policy_raises_error(self):
- action = "example:test"
- empty_credentials = {}
- invalid_json = '{"example:test": [],}'
- with open(self.tmpfilename, "w") as policyfile:
- policyfile.write(invalid_json)
- self.assertRaises(ValueError, rules.enforce,
- empty_credentials, action, self.target)
-
-class PolicyTestCase(BasePolicyTestCase):
+class PolicyTestCase(unit.TestCase):
def setUp(self):
super(PolicyTestCase, self).setUp()
- # NOTE(vish): preload rules to circumvent reloading from file
- rules.init()
self.rules = {
"true": [],
"example:allowed": [],
@@ -137,17 +113,16 @@ class PolicyTestCase(BasePolicyTestCase):
def test_ignore_case_role_check(self):
lowercase_action = "example:lowercase_admin"
uppercase_action = "example:uppercase_admin"
- # NOTE(dprince) we mix case in the Admin role here to ensure
+ # NOTE(dprince): We mix case in the Admin role here to ensure
# case is ignored
admin_credentials = {'roles': ['AdMiN']}
rules.enforce(admin_credentials, lowercase_action, self.target)
rules.enforce(admin_credentials, uppercase_action, self.target)
-class DefaultPolicyTestCase(BasePolicyTestCase):
+class DefaultPolicyTestCase(unit.TestCase):
def setUp(self):
super(DefaultPolicyTestCase, self).setUp()
- rules.init()
self.rules = {
"default": [],
@@ -160,7 +135,7 @@ class DefaultPolicyTestCase(BasePolicyTestCase):
# its enforce() method even though rules has been initialized via
# set_rules(). To make it easier to do our tests, we're going to
# monkeypatch load_roles() so it does nothing. This seem like a bug in
- # Oslo policy as we shoudn't have to reload the rules if they have
+ # Oslo policy as we shouldn't have to reload the rules if they have
# already been set using set_rules().
self._old_load_rules = rules._ENFORCER.load_rules
self.addCleanup(setattr, rules._ENFORCER, 'load_rules',
diff --git a/keystone-moon/keystone/tests/unit/test_revoke.py b/keystone-moon/keystone/tests/unit/test_revoke.py
index 9062981f..82c0125a 100644
--- a/keystone-moon/keystone/tests/unit/test_revoke.py
+++ b/keystone-moon/keystone/tests/unit/test_revoke.py
@@ -20,8 +20,8 @@ from six.moves import range
from testtools import matchers
from keystone.common import utils
-from keystone.contrib.revoke import model
from keystone import exception
+from keystone.models import revoke_model
from keystone.tests import unit
from keystone.tests.unit import test_backend_sql
from keystone.token import provider
@@ -46,7 +46,7 @@ def _past_time():
def _sample_blank_token():
issued_delta = datetime.timedelta(minutes=-2)
issued_at = timeutils.utcnow() + issued_delta
- token_data = model.blank_token_data(issued_at)
+ token_data = revoke_model.blank_token_data(issued_at)
return token_data
@@ -61,13 +61,12 @@ def _matches(event, token_values):
value for the attribute, and it does not match the token, no match
is possible, so skip the remaining checks.
- :param event one revocation event to match
- :param token_values dictionary with set of values taken from the
+ :param event: one revocation event to match
+ :param token_values: dictionary with set of values taken from the
token
- :returns if the token matches the revocation event, indicating the
+ :returns: True if the token matches the revocation event, indicating the
token has been revoked
"""
-
# The token has three attributes that can match the user_id
if event.user_id is not None:
for attribute_name in ['user_id', 'trustor_id', 'trustee_id']:
@@ -126,15 +125,16 @@ class RevokeTests(object):
self.revoke_api.revoke_by_user(user_id=1)
self.revoke_api.revoke_by_user(user_id=2)
past = timeutils.utcnow() - datetime.timedelta(seconds=1000)
- self.assertEqual(2, len(self.revoke_api.list_events(past)))
+ self.assertEqual(2, len(self.revoke_api.list_events(last_fetch=past)))
future = timeutils.utcnow() + datetime.timedelta(seconds=1000)
- self.assertEqual(0, len(self.revoke_api.list_events(future)))
+ self.assertEqual(0,
+ len(self.revoke_api.list_events(last_fetch=future)))
def test_past_expiry_are_removed(self):
user_id = 1
self.revoke_api.revoke_by_expiration(user_id, _future_time())
self.assertEqual(1, len(self.revoke_api.list_events()))
- event = model.RevokeEvent()
+ event = revoke_model.RevokeEvent()
event.revoked_at = _past_time()
self.revoke_api.revoke(event)
self.assertEqual(1, len(self.revoke_api.list_events()))
@@ -184,32 +184,17 @@ class RevokeTests(object):
class SqlRevokeTests(test_backend_sql.SqlTests, RevokeTests):
def config_overrides(self):
super(SqlRevokeTests, self).config_overrides()
- self.config_fixture.config(group='revoke', driver='sql')
self.config_fixture.config(
group='token',
provider='pki',
revoke_by_id=False)
-class KvsRevokeTests(unit.TestCase, RevokeTests):
- def config_overrides(self):
- super(KvsRevokeTests, self).config_overrides()
- self.config_fixture.config(group='revoke', driver='kvs')
- self.config_fixture.config(
- group='token',
- provider='pki',
- revoke_by_id=False)
-
- def setUp(self):
- super(KvsRevokeTests, self).setUp()
- self.load_backends()
-
-
class RevokeTreeTests(unit.TestCase):
def setUp(self):
super(RevokeTreeTests, self).setUp()
self.events = []
- self.tree = model.RevokeTree()
+ self.tree = revoke_model.RevokeTree()
self._sample_data()
def _sample_data(self):
@@ -263,20 +248,20 @@ class RevokeTreeTests(unit.TestCase):
def _revoke_by_user(self, user_id):
return self.tree.add_event(
- model.RevokeEvent(user_id=user_id))
+ revoke_model.RevokeEvent(user_id=user_id))
def _revoke_by_audit_id(self, audit_id):
event = self.tree.add_event(
- model.RevokeEvent(audit_id=audit_id))
+ revoke_model.RevokeEvent(audit_id=audit_id))
self.events.append(event)
return event
def _revoke_by_audit_chain_id(self, audit_chain_id, project_id=None,
domain_id=None):
event = self.tree.add_event(
- model.RevokeEvent(audit_chain_id=audit_chain_id,
- project_id=project_id,
- domain_id=domain_id)
+ revoke_model.RevokeEvent(audit_chain_id=audit_chain_id,
+ project_id=project_id,
+ domain_id=domain_id)
)
self.events.append(event)
return event
@@ -284,46 +269,47 @@ class RevokeTreeTests(unit.TestCase):
def _revoke_by_expiration(self, user_id, expires_at, project_id=None,
domain_id=None):
event = self.tree.add_event(
- model.RevokeEvent(user_id=user_id,
- expires_at=expires_at,
- project_id=project_id,
- domain_id=domain_id))
+ revoke_model.RevokeEvent(user_id=user_id,
+ expires_at=expires_at,
+ project_id=project_id,
+ domain_id=domain_id))
self.events.append(event)
return event
def _revoke_by_grant(self, role_id, user_id=None,
domain_id=None, project_id=None):
event = self.tree.add_event(
- model.RevokeEvent(user_id=user_id,
- role_id=role_id,
- domain_id=domain_id,
- project_id=project_id))
+ revoke_model.RevokeEvent(user_id=user_id,
+ role_id=role_id,
+ domain_id=domain_id,
+ project_id=project_id))
self.events.append(event)
return event
def _revoke_by_user_and_project(self, user_id, project_id):
event = self.tree.add_event(
- model.RevokeEvent(project_id=project_id,
- user_id=user_id))
+ revoke_model.RevokeEvent(project_id=project_id,
+ user_id=user_id))
self.events.append(event)
return event
def _revoke_by_project_role_assignment(self, project_id, role_id):
event = self.tree.add_event(
- model.RevokeEvent(project_id=project_id,
- role_id=role_id))
+ revoke_model.RevokeEvent(project_id=project_id,
+ role_id=role_id))
self.events.append(event)
return event
def _revoke_by_domain_role_assignment(self, domain_id, role_id):
event = self.tree.add_event(
- model.RevokeEvent(domain_id=domain_id,
- role_id=role_id))
+ revoke_model.RevokeEvent(domain_id=domain_id,
+ role_id=role_id))
self.events.append(event)
return event
def _revoke_by_domain(self, domain_id):
- event = self.tree.add_event(model.RevokeEvent(domain_id=domain_id))
+ event = self.tree.add_event(
+ revoke_model.RevokeEvent(domain_id=domain_id))
self.events.append(event)
def _user_field_test(self, field_name):
diff --git a/keystone-moon/keystone/tests/unit/test_sql_livetest.py b/keystone-moon/keystone/tests/unit/test_sql_livetest.py
index e2186907..18b8ea91 100644
--- a/keystone-moon/keystone/tests/unit/test_sql_livetest.py
+++ b/keystone-moon/keystone/tests/unit/test_sql_livetest.py
@@ -13,7 +13,6 @@
# under the License.
from keystone.tests import unit
-from keystone.tests.unit import test_sql_migrate_extensions
from keystone.tests.unit import test_sql_upgrade
@@ -39,29 +38,6 @@ class MysqlMigrateTests(test_sql_upgrade.SqlUpgradeTests):
return files
-class PostgresqlRevokeExtensionsTests(
- test_sql_migrate_extensions.RevokeExtension):
- def setUp(self):
- self.skip_if_env_not_set('ENABLE_LIVE_POSTGRES_TEST')
- super(PostgresqlRevokeExtensionsTests, self).setUp()
-
- def config_files(self):
- files = super(PostgresqlRevokeExtensionsTests, self).config_files()
- files.append(unit.dirs.tests_conf("backend_postgresql.conf"))
- return files
-
-
-class MysqlRevokeExtensionsTests(test_sql_migrate_extensions.RevokeExtension):
- def setUp(self):
- self.skip_if_env_not_set('ENABLE_LIVE_MYSQL_TEST')
- super(MysqlRevokeExtensionsTests, self).setUp()
-
- def config_files(self):
- files = super(MysqlRevokeExtensionsTests, self).config_files()
- files.append(unit.dirs.tests_conf("backend_mysql.conf"))
- return files
-
-
class Db2MigrateTests(test_sql_upgrade.SqlUpgradeTests):
def setUp(self):
self.skip_if_env_not_set('ENABLE_LIVE_DB2_TEST')
diff --git a/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py b/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
index f498fe94..0155f787 100644
--- a/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
+++ b/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py
@@ -29,369 +29,84 @@ WARNING::
all data will be lost.
"""
-import sqlalchemy
-import uuid
-
-from oslo_db import exception as db_exception
-from oslo_db.sqlalchemy import utils
-
from keystone.contrib import endpoint_filter
from keystone.contrib import endpoint_policy
-from keystone.contrib import example
from keystone.contrib import federation
from keystone.contrib import oauth1
from keystone.contrib import revoke
+from keystone import exception
from keystone.tests.unit import test_sql_upgrade
-class SqlUpgradeExampleExtension(test_sql_upgrade.SqlMigrateBase):
- def repo_package(self):
- return example
-
- def test_upgrade(self):
- self.assertTableDoesNotExist('example')
- self.upgrade(1, repository=self.repo_path)
- self.assertTableColumns('example', ['id', 'type', 'extra'])
+class SqlUpgradeOAuth1Extension(test_sql_upgrade.SqlMigrateBase):
+ OAUTH1_MIGRATIONS = 5
-class SqlUpgradeOAuth1Extension(test_sql_upgrade.SqlMigrateBase):
def repo_package(self):
return oauth1
- def upgrade(self, version):
- super(SqlUpgradeOAuth1Extension, self).upgrade(
- version, repository=self.repo_path)
-
- def _assert_v1_3_tables(self):
- self.assertTableColumns('consumer',
- ['id',
- 'description',
- 'secret',
- 'extra'])
- self.assertTableColumns('request_token',
- ['id',
- 'request_secret',
- 'verifier',
- 'authorizing_user_id',
- 'requested_project_id',
- 'requested_roles',
- 'consumer_id',
- 'expires_at'])
- self.assertTableColumns('access_token',
- ['id',
- 'access_secret',
- 'authorizing_user_id',
- 'project_id',
- 'requested_roles',
- 'consumer_id',
- 'expires_at'])
-
- def _assert_v4_later_tables(self):
- self.assertTableColumns('consumer',
- ['id',
- 'description',
- 'secret',
- 'extra'])
- self.assertTableColumns('request_token',
- ['id',
- 'request_secret',
- 'verifier',
- 'authorizing_user_id',
- 'requested_project_id',
- 'role_ids',
- 'consumer_id',
- 'expires_at'])
- self.assertTableColumns('access_token',
- ['id',
- 'access_secret',
- 'authorizing_user_id',
- 'project_id',
- 'role_ids',
- 'consumer_id',
- 'expires_at'])
-
def test_upgrade(self):
- self.assertTableDoesNotExist('consumer')
- self.assertTableDoesNotExist('request_token')
- self.assertTableDoesNotExist('access_token')
- self.upgrade(1)
- self._assert_v1_3_tables()
-
- # NOTE(blk-u): Migrations 2-3 don't modify the tables in a way that we
- # can easily test for.
+ for version in range(self.OAUTH1_MIGRATIONS):
+ v = version + 1
+ self.assertRaises(exception.MigrationMovedFailure,
+ self.upgrade, version=v,
+ repository=self.repo_path)
- self.upgrade(4)
- self._assert_v4_later_tables()
- self.upgrade(5)
- self._assert_v4_later_tables()
+class EndpointFilterExtension(test_sql_upgrade.SqlMigrateBase):
+ ENDPOINT_FILTER_MIGRATIONS = 2
-class EndpointFilterExtension(test_sql_upgrade.SqlMigrateBase):
def repo_package(self):
return endpoint_filter
- def upgrade(self, version):
- super(EndpointFilterExtension, self).upgrade(
- version, repository=self.repo_path)
-
- def _assert_v1_tables(self):
- self.assertTableColumns('project_endpoint',
- ['endpoint_id', 'project_id'])
- self.assertTableDoesNotExist('endpoint_group')
- self.assertTableDoesNotExist('project_endpoint_group')
-
- def _assert_v2_tables(self):
- self.assertTableColumns('project_endpoint',
- ['endpoint_id', 'project_id'])
- self.assertTableColumns('endpoint_group',
- ['id', 'name', 'description', 'filters'])
- self.assertTableColumns('project_endpoint_group',
- ['endpoint_group_id', 'project_id'])
-
def test_upgrade(self):
- self.assertTableDoesNotExist('project_endpoint')
- self.upgrade(1)
- self._assert_v1_tables()
- self.assertTableColumns('project_endpoint',
- ['endpoint_id', 'project_id'])
- self.upgrade(2)
- self._assert_v2_tables()
+ for version in range(self.ENDPOINT_FILTER_MIGRATIONS):
+ v = version + 1
+ self.assertRaises(exception.MigrationMovedFailure,
+ self.upgrade, version=v,
+ repository=self.repo_path)
class EndpointPolicyExtension(test_sql_upgrade.SqlMigrateBase):
+
+ ENDPOINT_POLICY_MIGRATIONS = 1
+
def repo_package(self):
return endpoint_policy
def test_upgrade(self):
- self.assertTableDoesNotExist('policy_association')
- self.upgrade(1, repository=self.repo_path)
- self.assertTableColumns('policy_association',
- ['id', 'policy_id', 'endpoint_id',
- 'service_id', 'region_id'])
+ self.assertRaises(exception.MigrationMovedFailure,
+ self.upgrade,
+ version=self.ENDPOINT_POLICY_MIGRATIONS,
+ repository=self.repo_path)
class FederationExtension(test_sql_upgrade.SqlMigrateBase):
- """Test class for ensuring the Federation SQL."""
- def setUp(self):
- super(FederationExtension, self).setUp()
- self.identity_provider = 'identity_provider'
- self.federation_protocol = 'federation_protocol'
- self.service_provider = 'service_provider'
- self.mapping = 'mapping'
- self.remote_id_table = 'idp_remote_ids'
+ FEDERATION_MIGRATIONS = 8
def repo_package(self):
return federation
- def insert_dict(self, session, table_name, d):
- """Naively inserts key-value pairs into a table, given a dictionary."""
- table = sqlalchemy.Table(table_name, self.metadata, autoload=True)
- insert = table.insert().values(**d)
- session.execute(insert)
- session.commit()
-
def test_upgrade(self):
- self.assertTableDoesNotExist(self.identity_provider)
- self.assertTableDoesNotExist(self.federation_protocol)
- self.assertTableDoesNotExist(self.mapping)
-
- self.upgrade(1, repository=self.repo_path)
- self.assertTableColumns(self.identity_provider,
- ['id',
- 'enabled',
- 'description'])
-
- self.assertTableColumns(self.federation_protocol,
- ['id',
- 'idp_id',
- 'mapping_id'])
-
- self.upgrade(2, repository=self.repo_path)
- self.assertTableColumns(self.mapping,
- ['id', 'rules'])
-
- federation_protocol = utils.get_table(
- self.engine,
- 'federation_protocol')
- with self.engine.begin() as conn:
- conn.execute(federation_protocol.insert(), id=0, idp_id=1)
- self.upgrade(3, repository=self.repo_path)
- federation_protocol = utils.get_table(
- self.engine,
- 'federation_protocol')
- self.assertFalse(federation_protocol.c.mapping_id.nullable)
-
- def test_service_provider_attributes_cannot_be_null(self):
- self.upgrade(6, repository=self.repo_path)
- self.assertTableColumns(self.service_provider,
- ['id', 'description', 'enabled', 'auth_url',
- 'sp_url'])
-
- session = self.Session()
- sp1 = {'id': uuid.uuid4().hex,
- 'auth_url': None,
- 'sp_url': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
- sp2 = {'id': uuid.uuid4().hex,
- 'auth_url': uuid.uuid4().hex,
- 'sp_url': None,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
- sp3 = {'id': uuid.uuid4().hex,
- 'auth_url': None,
- 'sp_url': None,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
-
- # Insert with 'auth_url' or 'sp_url' set to null must fail
- self.assertRaises(db_exception.DBError,
- self.insert_dict,
- session,
- self.service_provider,
- sp1)
- self.assertRaises(db_exception.DBError,
- self.insert_dict,
- session,
- self.service_provider,
- sp2)
- self.assertRaises(db_exception.DBError,
- self.insert_dict,
- session,
- self.service_provider,
- sp3)
-
- session.close()
-
- def test_fixup_service_provider_attributes(self):
- session = self.Session()
- sp1 = {'id': uuid.uuid4().hex,
- 'auth_url': None,
- 'sp_url': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
- sp2 = {'id': uuid.uuid4().hex,
- 'auth_url': uuid.uuid4().hex,
- 'sp_url': None,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
- sp3 = {'id': uuid.uuid4().hex,
- 'auth_url': None,
- 'sp_url': None,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
- self.upgrade(5, repository=self.repo_path)
- self.assertTableColumns(self.service_provider,
- ['id', 'description', 'enabled', 'auth_url',
- 'sp_url'])
-
- # Before the migration, the table should accept null values
- self.insert_dict(session, self.service_provider, sp1)
- self.insert_dict(session, self.service_provider, sp2)
- self.insert_dict(session, self.service_provider, sp3)
-
- # Check if null values are updated to empty string when migrating
- session.close()
- self.upgrade(6, repository=self.repo_path)
- sp_table = sqlalchemy.Table(self.service_provider,
- self.metadata,
- autoload=True)
- session = self.Session()
- self.metadata.clear()
-
- sp = session.query(sp_table).filter(sp_table.c.id == sp1['id'])[0]
- self.assertEqual('', sp.auth_url)
-
- sp = session.query(sp_table).filter(sp_table.c.id == sp2['id'])[0]
- self.assertEqual('', sp.sp_url)
-
- sp = session.query(sp_table).filter(sp_table.c.id == sp3['id'])[0]
- self.assertEqual('', sp.auth_url)
- self.assertEqual('', sp.sp_url)
-
- def test_propagate_remote_id_to_separate_column(self):
- """Make sure empty remote_id is not propagated.
- Test scenario:
- - Upgrade database to version 6 where identity_provider table has a
- remote_id column
- - Add 3 identity provider objects, where idp1 and idp2 have valid
- remote_id parameter set, and idp3 has it empty (None).
- - Upgrade database to version 7 and expect migration scripts to
- properly move data rom identity_provider.remote_id column into
- separate table idp_remote_ids.
- - In the idp_remote_ids table expect to find entries for idp1 and idp2
- and not find anything for idp3 (identitified by idp's id)
-
- """
- session = self.Session()
- idp1 = {'id': uuid.uuid4().hex,
- 'remote_id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
- idp2 = {'id': uuid.uuid4().hex,
- 'remote_id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
- idp3 = {'id': uuid.uuid4().hex,
- 'remote_id': None,
- 'description': uuid.uuid4().hex,
- 'enabled': True}
- self.upgrade(6, repository=self.repo_path)
- self.assertTableColumns(self.identity_provider,
- ['id', 'description', 'enabled', 'remote_id'])
-
- self.insert_dict(session, self.identity_provider, idp1)
- self.insert_dict(session, self.identity_provider, idp2)
- self.insert_dict(session, self.identity_provider, idp3)
-
- session.close()
- self.upgrade(7, repository=self.repo_path)
-
- self.assertTableColumns(self.identity_provider,
- ['id', 'description', 'enabled'])
- remote_id_table = sqlalchemy.Table(self.remote_id_table,
- self.metadata,
- autoload=True)
-
- session = self.Session()
- self.metadata.clear()
-
- idp = session.query(remote_id_table).filter(
- remote_id_table.c.idp_id == idp1['id'])[0]
- self.assertEqual(idp1['remote_id'], idp.remote_id)
-
- idp = session.query(remote_id_table).filter(
- remote_id_table.c.idp_id == idp2['id'])[0]
- self.assertEqual(idp2['remote_id'], idp.remote_id)
-
- idp = session.query(remote_id_table).filter(
- remote_id_table.c.idp_id == idp3['id'])
- # NOTE(marek-denis): As idp3 had empty 'remote_id' attribute we expect
- # not to find it in the 'remote_id_table' table, hence count should be
- # 0.real
- self.assertEqual(0, idp.count())
-
- def test_add_relay_state_column(self):
- self.upgrade(8, repository=self.repo_path)
- self.assertTableColumns(self.service_provider,
- ['id', 'description', 'enabled', 'auth_url',
- 'relay_state_prefix', 'sp_url'])
+ for version in range(self.FEDERATION_MIGRATIONS):
+ v = version + 1
+ self.assertRaises(exception.MigrationMovedFailure,
+ self.upgrade, version=v,
+ repository=self.repo_path)
class RevokeExtension(test_sql_upgrade.SqlMigrateBase):
- _REVOKE_COLUMN_NAMES = ['id', 'domain_id', 'project_id', 'user_id',
- 'role_id', 'trust_id', 'consumer_id',
- 'access_token_id', 'issued_before', 'expires_at',
- 'revoked_at']
+ REVOKE_MIGRATIONS = 2
def repo_package(self):
return revoke
def test_upgrade(self):
- self.assertTableDoesNotExist('revocation_event')
- self.upgrade(1, repository=self.repo_path)
- self.assertTableColumns('revocation_event',
- self._REVOKE_COLUMN_NAMES)
+ for version in range(self.REVOKE_MIGRATIONS):
+ v = version + 1
+ self.assertRaises(exception.MigrationMovedFailure,
+ self.upgrade, version=v,
+ repository=self.repo_path)
diff --git a/keystone-moon/keystone/tests/unit/test_sql_upgrade.py b/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
index d617d445..5ca12f66 100644
--- a/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
+++ b/keystone-moon/keystone/tests/unit/test_sql_upgrade.py
@@ -29,11 +29,13 @@ WARNING::
all data will be lost.
"""
-import copy
import json
import uuid
+import migrate
from migrate.versioning import api as versioning_api
+from migrate.versioning import repository
+import mock
from oslo_config import cfg
from oslo_db import exception as db_exception
from oslo_db.sqlalchemy import migration
@@ -41,12 +43,10 @@ from oslo_db.sqlalchemy import session as db_session
from sqlalchemy.engine import reflection
import sqlalchemy.exc
from sqlalchemy import schema
+from testtools import matchers
from keystone.common import sql
-from keystone.common.sql import migrate_repo
from keystone.common.sql import migration_helpers
-from keystone.contrib import federation
-from keystone.contrib import revoke
from keystone import exception
from keystone.tests import unit
from keystone.tests.unit import default_fixtures
@@ -54,7 +54,6 @@ from keystone.tests.unit.ksfixtures import database
CONF = cfg.CONF
-DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
# NOTE(morganfainberg): This should be updated when each DB migration collapse
# is done to mirror the expected structure of the DB in the format of
@@ -67,8 +66,8 @@ INITIAL_TABLE_STRUCTURE = {
'id', 'name', 'enabled', 'extra',
],
'endpoint': [
- 'id', 'legacy_endpoint_id', 'interface', 'region', 'service_id', 'url',
- 'enabled', 'extra',
+ 'id', 'legacy_endpoint_id', 'interface', 'region_id', 'service_id',
+ 'url', 'enabled', 'extra',
],
'group': [
'id', 'domain_id', 'name', 'description', 'extra',
@@ -78,6 +77,7 @@ INITIAL_TABLE_STRUCTURE = {
],
'project': [
'id', 'name', 'extra', 'description', 'enabled', 'domain_id',
+ 'parent_id',
],
'role': [
'id', 'name', 'extra',
@@ -108,23 +108,82 @@ INITIAL_TABLE_STRUCTURE = {
'assignment': [
'type', 'actor_id', 'target_id', 'role_id', 'inherited',
],
-}
-
-
-INITIAL_EXTENSION_TABLE_STRUCTURE = {
- 'revocation_event': [
- 'id', 'domain_id', 'project_id', 'user_id', 'role_id',
- 'trust_id', 'consumer_id', 'access_token_id',
- 'issued_before', 'expires_at', 'revoked_at', 'audit_id',
- 'audit_chain_id',
+ 'id_mapping': [
+ 'public_id', 'domain_id', 'local_id', 'entity_type',
+ ],
+ 'whitelisted_config': [
+ 'domain_id', 'group', 'option', 'value',
+ ],
+ 'sensitive_config': [
+ 'domain_id', 'group', 'option', 'value',
],
}
-EXTENSIONS = {'federation': federation,
- 'revoke': revoke}
+
+# Test migration_helpers.get_init_version separately to ensure it works before
+# using in the SqlUpgrade tests.
+class MigrationHelpersGetInitVersionTests(unit.TestCase):
+ @mock.patch.object(repository, 'Repository')
+ def test_get_init_version_no_path(self, repo):
+ migrate_versions = mock.MagicMock()
+ # make a version list starting with zero. `get_init_version` will
+ # return None for this value.
+ migrate_versions.versions.versions = list(range(0, 5))
+ repo.return_value = migrate_versions
+
+ # os.path.isdir() is called by `find_migrate_repo()`. Mock it to avoid
+ # an exception.
+ with mock.patch('os.path.isdir', return_value=True):
+ # since 0 is the smallest version expect None
+ version = migration_helpers.get_init_version()
+ self.assertIsNone(version)
+
+ # check that the default path was used as the first argument to the
+ # first invocation of repo. Cannot match the full path because it is
+ # based on where the test is run.
+ param = repo.call_args_list[0][0][0]
+ self.assertTrue(param.endswith('/sql/migrate_repo'))
+
+ @mock.patch.object(repository, 'Repository')
+ def test_get_init_version_with_path_initial_version_0(self, repo):
+ migrate_versions = mock.MagicMock()
+ # make a version list starting with zero. `get_init_version` will
+ # return None for this value.
+ migrate_versions.versions.versions = list(range(0, 5))
+ repo.return_value = migrate_versions
+
+ # os.path.isdir() is called by `find_migrate_repo()`. Mock it to avoid
+ # an exception.
+ with mock.patch('os.path.isdir', return_value=True):
+ path = '/keystone/migrate_repo/'
+
+ # since 0 is the smallest version expect None
+ version = migration_helpers.get_init_version(abs_path=path)
+ self.assertIsNone(version)
+
+ @mock.patch.object(repository, 'Repository')
+ def test_get_init_version_with_path(self, repo):
+ initial_version = 10
+
+ migrate_versions = mock.MagicMock()
+ migrate_versions.versions.versions = list(range(initial_version + 1,
+ initial_version + 5))
+ repo.return_value = migrate_versions
+
+ # os.path.isdir() is called by `find_migrate_repo()`. Mock it to avoid
+ # an exception.
+ with mock.patch('os.path.isdir', return_value=True):
+ path = '/keystone/migrate_repo/'
+
+ version = migration_helpers.get_init_version(abs_path=path)
+ self.assertEqual(initial_version, version)
class SqlMigrateBase(unit.SQLDriverOverrides, unit.TestCase):
+ # override this in subclasses. The default of zero covers tests such
+ # as extensions upgrades.
+ _initial_db_version = 0
+
def initialize_sql(self):
self.metadata = sqlalchemy.MetaData()
self.metadata.bind = self.engine
@@ -139,6 +198,7 @@ class SqlMigrateBase(unit.SQLDriverOverrides, unit.TestCase):
def setUp(self):
super(SqlMigrateBase, self).setUp()
+ self.load_backends()
database.initialize_sql_session()
conn_str = CONF.database.connection
if (conn_str != unit.IN_MEM_DB_CONN_STRING and
@@ -155,7 +215,9 @@ class SqlMigrateBase(unit.SQLDriverOverrides, unit.TestCase):
connection='sqlite:///%s' % db_file)
# create and share a single sqlalchemy engine for testing
- self.engine = sql.get_engine()
+ with sql.session_for_write() as session:
+ self.engine = session.get_bind()
+ self.addCleanup(self.cleanup_instance('engine'))
self.Session = db_session.get_maker(self.engine, autocommit=False)
self.addCleanup(sqlalchemy.orm.session.Session.close_all)
@@ -164,7 +226,8 @@ class SqlMigrateBase(unit.SQLDriverOverrides, unit.TestCase):
self.repo_package())
self.schema = versioning_api.ControlledSchema.create(
self.engine,
- self.repo_path, self.initial_db_version)
+ self.repo_path,
+ self._initial_db_version)
# auto-detect the highest available schema version in the migrate_repo
self.max_version = self.schema.repository.version().version
@@ -229,6 +292,23 @@ class SqlMigrateBase(unit.SQLDriverOverrides, unit.TestCase):
else:
raise AssertionError('Table "%s" already exists' % table_name)
+ def assertTableCountsMatch(self, table1_name, table2_name):
+ try:
+ table1 = self.select_table(table1_name)
+ except sqlalchemy.exc.NoSuchTableError:
+ raise AssertionError('Table "%s" does not exist' % table1_name)
+ try:
+ table2 = self.select_table(table2_name)
+ except sqlalchemy.exc.NoSuchTableError:
+ raise AssertionError('Table "%s" does not exist' % table2_name)
+ session = self.Session()
+ table1_count = session.execute(table1.count()).scalar()
+ table2_count = session.execute(table2.count()).scalar()
+ if table1_count != table2_count:
+ raise AssertionError('Table counts do not match: {0} ({1}), {2} '
+ '({3})'.format(table1_name, table1_count,
+ table2_name, table2_count))
+
def upgrade(self, *args, **kwargs):
self._migrate(*args, **kwargs)
@@ -257,50 +337,30 @@ class SqlMigrateBase(unit.SQLDriverOverrides, unit.TestCase):
self.assertItemsEqual(expected_cols, actual_cols,
'%s table' % table_name)
- @property
- def initial_db_version(self):
- return getattr(self, '_initial_db_version', 0)
-
class SqlUpgradeTests(SqlMigrateBase):
-
- _initial_db_version = migrate_repo.DB_INIT_VERSION
+ _initial_db_version = migration_helpers.get_init_version()
def test_blank_db_to_start(self):
self.assertTableDoesNotExist('user')
def test_start_version_db_init_version(self):
- version = migration.db_version(sql.get_engine(), self.repo_path,
- migrate_repo.DB_INIT_VERSION)
+ with sql.session_for_write() as session:
+ version = migration.db_version(session.get_bind(), self.repo_path,
+ self._initial_db_version)
self.assertEqual(
- migrate_repo.DB_INIT_VERSION,
+ self._initial_db_version,
version,
- 'DB is not at version %s' % migrate_repo.DB_INIT_VERSION)
+ 'DB is not at version %s' % self._initial_db_version)
def test_upgrade_add_initial_tables(self):
- self.upgrade(migrate_repo.DB_INIT_VERSION + 1)
+ self.upgrade(self._initial_db_version + 1)
self.check_initial_table_structure()
def check_initial_table_structure(self):
for table in INITIAL_TABLE_STRUCTURE:
self.assertTableColumns(table, INITIAL_TABLE_STRUCTURE[table])
- # Ensure the default domain was properly created.
- default_domain = migration_helpers.get_default_domain()
-
- meta = sqlalchemy.MetaData()
- meta.bind = self.engine
-
- domain_table = sqlalchemy.Table('domain', meta, autoload=True)
-
- session = self.Session()
- q = session.query(domain_table)
- refs = q.all()
-
- self.assertEqual(1, len(refs))
- for k in default_domain.keys():
- self.assertEqual(default_domain[k], getattr(refs[0], k))
-
def insert_dict(self, session, table_name, d, table=None):
"""Naively inserts key-value pairs into a table, given a dictionary."""
if table is None:
@@ -312,127 +372,43 @@ class SqlUpgradeTests(SqlMigrateBase):
session.execute(insert)
session.commit()
- def test_id_mapping(self):
- self.upgrade(50)
- self.assertTableDoesNotExist('id_mapping')
- self.upgrade(51)
- self.assertTableExists('id_mapping')
-
- def test_region_url_upgrade(self):
- self.upgrade(52)
- self.assertTableColumns('region',
- ['id', 'description', 'parent_region_id',
- 'extra', 'url'])
-
- def test_endpoint_region_upgrade_columns(self):
- self.upgrade(53)
- self.assertTableColumns('endpoint',
- ['id', 'legacy_endpoint_id', 'interface',
- 'service_id', 'url', 'extra', 'enabled',
- 'region_id'])
- region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
- self.assertEqual(255, region_table.c.id.type.length)
- self.assertEqual(255, region_table.c.parent_region_id.type.length)
- endpoint_table = sqlalchemy.Table('endpoint',
- self.metadata,
- autoload=True)
- self.assertEqual(255, endpoint_table.c.region_id.type.length)
-
- def test_endpoint_region_migration(self):
- self.upgrade(52)
- session = self.Session()
- _small_region_name = '0' * 30
- _long_region_name = '0' * 255
- _clashing_region_name = '0' * 70
-
- def add_service():
- service_id = uuid.uuid4().hex
-
- service = {
- 'id': service_id,
- 'type': uuid.uuid4().hex
- }
-
- self.insert_dict(session, 'service', service)
-
- return service_id
-
- def add_endpoint(service_id, region):
- endpoint_id = uuid.uuid4().hex
-
- endpoint = {
- 'id': endpoint_id,
- 'interface': uuid.uuid4().hex[:8],
- 'service_id': service_id,
- 'url': uuid.uuid4().hex,
- 'region': region
- }
- self.insert_dict(session, 'endpoint', endpoint)
-
- return endpoint_id
-
- _service_id_ = add_service()
- add_endpoint(_service_id_, region=_long_region_name)
- add_endpoint(_service_id_, region=_long_region_name)
- add_endpoint(_service_id_, region=_clashing_region_name)
- add_endpoint(_service_id_, region=_small_region_name)
- add_endpoint(_service_id_, region=None)
-
- # upgrade to 53
- session.close()
- self.upgrade(53)
- session = self.Session()
- self.metadata.clear()
+ def test_kilo_squash(self):
+ self.upgrade(67)
- region_table = sqlalchemy.Table('region', self.metadata, autoload=True)
- self.assertEqual(1, session.query(region_table).
- filter_by(id=_long_region_name).count())
- self.assertEqual(1, session.query(region_table).
- filter_by(id=_clashing_region_name).count())
- self.assertEqual(1, session.query(region_table).
- filter_by(id=_small_region_name).count())
+ # In 053 the sizes of the ID and parent region ID columns were changed
+ table = sqlalchemy.Table('region', self.metadata, autoload=True)
+ self.assertEqual(255, table.c.id.type.length)
+ self.assertEqual(255, table.c.parent_region_id.type.length)
+ table = sqlalchemy.Table('endpoint', self.metadata, autoload=True)
+ self.assertEqual(255, table.c.region_id.type.length)
- endpoint_table = sqlalchemy.Table('endpoint',
- self.metadata,
- autoload=True)
- self.assertEqual(5, session.query(endpoint_table).count())
- self.assertEqual(2, session.query(endpoint_table).
- filter_by(region_id=_long_region_name).count())
- self.assertEqual(1, session.query(endpoint_table).
- filter_by(region_id=_clashing_region_name).count())
- self.assertEqual(1, session.query(endpoint_table).
- filter_by(region_id=_small_region_name).count())
-
- def test_add_actor_id_index(self):
- self.upgrade(53)
- self.upgrade(54)
+ # In 054 an index was created for the actor_id of the assignment table
table = sqlalchemy.Table('assignment', self.metadata, autoload=True)
index_data = [(idx.name, list(idx.columns.keys()))
for idx in table.indexes]
self.assertIn(('ix_actor_id', ['actor_id']), index_data)
- def test_token_user_id_and_trust_id_index_upgrade(self):
- self.upgrade(54)
- self.upgrade(55)
+ # In 055 indexes were created for user and trust IDs in the token table
table = sqlalchemy.Table('token', self.metadata, autoload=True)
index_data = [(idx.name, list(idx.columns.keys()))
for idx in table.indexes]
self.assertIn(('ix_token_user_id', ['user_id']), index_data)
self.assertIn(('ix_token_trust_id', ['trust_id']), index_data)
- def test_project_parent_id_upgrade(self):
- self.upgrade(61)
- self.assertTableColumns('project',
- ['id', 'name', 'extra', 'description',
- 'enabled', 'domain_id', 'parent_id'])
+ # In 062 the role ID foreign key was removed from the assignment table
+ if self.engine.name == "mysql":
+ self.assertFalse(self.does_fk_exist('assignment', 'role_id'))
- def test_drop_assignment_role_fk(self):
- self.upgrade(61)
- self.assertTrue(self.does_fk_exist('assignment', 'role_id'))
- self.upgrade(62)
+ # In 064 the domain ID FK was removed from the group and user tables
if self.engine.name != 'sqlite':
# sqlite does not support FK deletions (or enforcement)
- self.assertFalse(self.does_fk_exist('assignment', 'role_id'))
+ self.assertFalse(self.does_fk_exist('group', 'domain_id'))
+ self.assertFalse(self.does_fk_exist('user', 'domain_id'))
+
+ # In 067 the role ID index was removed from the assignment table
+ if self.engine.name == "mysql":
+ self.assertFalse(self._does_index_exist('assignment',
+ 'assignment_role_id_fkey'))
def test_insert_assignment_inherited_pk(self):
ASSIGNMENT_TABLE_NAME = 'assignment'
@@ -502,7 +478,6 @@ class SqlUpgradeTests(SqlMigrateBase):
def does_pk_exist(self, table, pk_column):
"""Checks whether a column is primary key on a table."""
-
inspector = reflection.Inspector.from_engine(self.engine)
pk_columns = inspector.get_pk_constraint(table)['constrained_columns']
@@ -515,119 +490,164 @@ class SqlUpgradeTests(SqlMigrateBase):
return True
return False
- def test_drop_region_url_upgrade(self):
- self.upgrade(63)
- self.assertTableColumns('region',
- ['id', 'description', 'parent_region_id',
- 'extra'])
-
- def test_domain_fk(self):
- self.upgrade(63)
- self.assertTrue(self.does_fk_exist('group', 'domain_id'))
- self.assertTrue(self.does_fk_exist('user', 'domain_id'))
- self.upgrade(64)
- if self.engine.name != 'sqlite':
- # sqlite does not support FK deletions (or enforcement)
- self.assertFalse(self.does_fk_exist('group', 'domain_id'))
- self.assertFalse(self.does_fk_exist('user', 'domain_id'))
-
- def test_add_domain_config(self):
- whitelisted_table = 'whitelisted_config'
- sensitive_table = 'sensitive_config'
- self.upgrade(64)
- self.assertTableDoesNotExist(whitelisted_table)
- self.assertTableDoesNotExist(sensitive_table)
- self.upgrade(65)
- self.assertTableColumns(whitelisted_table,
- ['domain_id', 'group', 'option', 'value'])
- self.assertTableColumns(sensitive_table,
- ['domain_id', 'group', 'option', 'value'])
-
- def test_fixup_service_name_value_upgrade(self):
- """Update service name data from `extra` to empty string."""
- def add_service(**extra_data):
- service_id = uuid.uuid4().hex
-
- service = {
- 'id': service_id,
- 'type': uuid.uuid4().hex,
- 'extra': json.dumps(extra_data),
- }
-
- self.insert_dict(session, 'service', service)
-
- return service_id
-
- self.upgrade(65)
- session = self.Session()
-
- # Services with extra values having a random attribute and
- # different combinations of name
- random_attr_name = uuid.uuid4().hex
- random_attr_value = uuid.uuid4().hex
- random_attr_str = "%s='%s'" % (random_attr_name, random_attr_value)
- random_attr_no_name = {random_attr_name: random_attr_value}
- random_attr_no_name_str = "%s='%s'" % (random_attr_name,
- random_attr_value)
- random_attr_name_value = {random_attr_name: random_attr_value,
- 'name': 'myname'}
- random_attr_name_value_str = 'name=myname,%s' % random_attr_str
- random_attr_name_empty = {random_attr_name: random_attr_value,
- 'name': ''}
- random_attr_name_empty_str = 'name=,%s' % random_attr_str
- random_attr_name_none = {random_attr_name: random_attr_value,
- 'name': None}
- random_attr_name_none_str = 'name=None,%s' % random_attr_str
-
- services = [
- (add_service(**random_attr_no_name),
- random_attr_name_empty, random_attr_no_name_str),
- (add_service(**random_attr_name_value),
- random_attr_name_value, random_attr_name_value_str),
- (add_service(**random_attr_name_empty),
- random_attr_name_empty, random_attr_name_empty_str),
- (add_service(**random_attr_name_none),
- random_attr_name_empty, random_attr_name_none_str),
- ]
-
- # NOTE(viktors): Add a service with empty extra field
- self.insert_dict(session, 'service',
- {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex})
-
- session.close()
- self.upgrade(66)
- session = self.Session()
-
- # Verify that the services have the expected values.
- self.metadata.clear()
- service_table = sqlalchemy.Table('service', self.metadata,
- autoload=True)
-
- def fetch_service_extra(service_id):
- cols = [service_table.c.extra]
- f = service_table.c.id == service_id
- s = sqlalchemy.select(cols).where(f)
- service = session.execute(s).fetchone()
- return json.loads(service.extra)
-
- for service_id, exp_extra, msg in services:
- extra = fetch_service_extra(service_id)
- self.assertDictEqual(exp_extra, extra, msg)
-
- def _does_index_exist(self, table_name, index_name):
+ def does_index_exist(self, table_name, index_name):
meta = sqlalchemy.MetaData(bind=self.engine)
- table = sqlalchemy.Table('assignment', meta, autoload=True)
+ table = sqlalchemy.Table(table_name, meta, autoload=True)
return index_name in [idx.name for idx in table.indexes]
- def test_drop_assignment_role_id_index_mysql(self):
- self.upgrade(66)
- if self.engine.name == "mysql":
- self.assertTrue(self._does_index_exist('assignment',
- 'assignment_role_id_fkey'))
- self.upgrade(67)
- if self.engine.name == "mysql":
- self.assertFalse(self._does_index_exist('assignment',
- 'assignment_role_id_fkey'))
+ def does_constraint_exist(self, table_name, constraint_name):
+ meta = sqlalchemy.MetaData(bind=self.engine)
+ table = sqlalchemy.Table(table_name, meta, autoload=True)
+ return constraint_name in [con.name for con in table.constraints]
+
+ def test_endpoint_policy_upgrade(self):
+ self.assertTableDoesNotExist('policy_association')
+ self.upgrade(81)
+ self.assertTableColumns('policy_association',
+ ['id', 'policy_id', 'endpoint_id',
+ 'service_id', 'region_id'])
+
+ @mock.patch.object(migration_helpers, 'get_db_version', return_value=1)
+ def test_endpoint_policy_already_migrated(self, mock_ep):
+
+ # By setting the return value to 1, the migration has already been
+ # run, and there's no need to create the table again
+
+ self.upgrade(81)
+
+ mock_ep.assert_called_once_with(extension='endpoint_policy',
+ engine=mock.ANY)
+
+ # It won't exist because we are mocking it, but we can verify
+ # that 081 did not create the table
+ self.assertTableDoesNotExist('policy_association')
+
+ def test_create_federation_tables(self):
+ self.identity_provider = 'identity_provider'
+ self.federation_protocol = 'federation_protocol'
+ self.service_provider = 'service_provider'
+ self.mapping = 'mapping'
+ self.remote_ids = 'idp_remote_ids'
+
+ self.assertTableDoesNotExist(self.identity_provider)
+ self.assertTableDoesNotExist(self.federation_protocol)
+ self.assertTableDoesNotExist(self.service_provider)
+ self.assertTableDoesNotExist(self.mapping)
+ self.assertTableDoesNotExist(self.remote_ids)
+
+ self.upgrade(82)
+ self.assertTableColumns(self.identity_provider,
+ ['id', 'description', 'enabled'])
+
+ self.assertTableColumns(self.federation_protocol,
+ ['id', 'idp_id', 'mapping_id'])
+
+ self.assertTableColumns(self.mapping,
+ ['id', 'rules'])
+
+ self.assertTableColumns(self.service_provider,
+ ['id', 'description', 'enabled', 'auth_url',
+ 'relay_state_prefix', 'sp_url'])
+
+ self.assertTableColumns(self.remote_ids, ['idp_id', 'remote_id'])
+
+ federation_protocol = sqlalchemy.Table(self.federation_protocol,
+ self.metadata,
+ autoload=True)
+ self.assertFalse(federation_protocol.c.mapping_id.nullable)
+
+ sp_table = sqlalchemy.Table(self.service_provider,
+ self.metadata,
+ autoload=True)
+ self.assertFalse(sp_table.c.auth_url.nullable)
+ self.assertFalse(sp_table.c.sp_url.nullable)
+
+ @mock.patch.object(migration_helpers, 'get_db_version', return_value=8)
+ def test_federation_already_migrated(self, mock_federation):
+
+ # By setting the return value to 8, the migration has already been
+ # run, and there's no need to create the table again.
+ self.upgrade(82)
+
+ mock_federation.assert_any_call(extension='federation',
+ engine=mock.ANY)
+
+ # It won't exist because we are mocking it, but we can verify
+ # that 082 did not create the table.
+ self.assertTableDoesNotExist('identity_provider')
+ self.assertTableDoesNotExist('federation_protocol')
+ self.assertTableDoesNotExist('mapping')
+ self.assertTableDoesNotExist('service_provider')
+ self.assertTableDoesNotExist('idp_remote_ids')
+
+ def test_create_oauth_tables(self):
+ consumer = 'consumer'
+ request_token = 'request_token'
+ access_token = 'access_token'
+ self.assertTableDoesNotExist(consumer)
+ self.assertTableDoesNotExist(request_token)
+ self.assertTableDoesNotExist(access_token)
+ self.upgrade(83)
+ self.assertTableColumns(consumer,
+ ['id',
+ 'description',
+ 'secret',
+ 'extra'])
+ self.assertTableColumns(request_token,
+ ['id',
+ 'request_secret',
+ 'verifier',
+ 'authorizing_user_id',
+ 'requested_project_id',
+ 'role_ids',
+ 'consumer_id',
+ 'expires_at'])
+ self.assertTableColumns(access_token,
+ ['id',
+ 'access_secret',
+ 'authorizing_user_id',
+ 'project_id',
+ 'role_ids',
+ 'consumer_id',
+ 'expires_at'])
+
+ @mock.patch.object(migration_helpers, 'get_db_version', return_value=5)
+ def test_oauth1_already_migrated(self, mock_oauth1):
+
+ # By setting the return value to 5, the migration has already been
+ # run, and there's no need to create the table again.
+ self.upgrade(83)
+
+ mock_oauth1.assert_any_call(extension='oauth1', engine=mock.ANY)
+
+ # It won't exist because we are mocking it, but we can verify
+ # that 083 did not create the table.
+ self.assertTableDoesNotExist('consumer')
+ self.assertTableDoesNotExist('request_token')
+ self.assertTableDoesNotExist('access_token')
+
+ def test_create_revoke_table(self):
+ self.assertTableDoesNotExist('revocation_event')
+ self.upgrade(84)
+ self.assertTableColumns('revocation_event',
+ ['id', 'domain_id', 'project_id', 'user_id',
+ 'role_id', 'trust_id', 'consumer_id',
+ 'access_token_id', 'issued_before',
+ 'expires_at', 'revoked_at',
+ 'audit_chain_id', 'audit_id'])
+
+ @mock.patch.object(migration_helpers, 'get_db_version', return_value=2)
+ def test_revoke_already_migrated(self, mock_revoke):
+
+ # By setting the return value to 2, the migration has already been
+ # run, and there's no need to create the table again.
+ self.upgrade(84)
+
+ mock_revoke.assert_any_call(extension='revoke', engine=mock.ANY)
+
+ # It won't exist because we are mocking it, but we can verify
+ # that 084 did not create the table.
+ self.assertTableDoesNotExist('revocation_event')
def test_project_is_domain_upgrade(self):
self.upgrade(74)
@@ -636,6 +656,13 @@ class SqlUpgradeTests(SqlMigrateBase):
'enabled', 'domain_id', 'parent_id',
'is_domain'])
+ def test_implied_roles_upgrade(self):
+ self.upgrade(87)
+ self.assertTableColumns('implied_role',
+ ['prior_role_id', 'implied_role_id'])
+ self.assertTrue(self.does_fk_exist('implied_role', 'prior_role_id'))
+ self.assertTrue(self.does_fk_exist('implied_role', 'implied_role_id'))
+
def test_add_config_registration(self):
config_registration = 'config_register'
self.upgrade(74)
@@ -643,136 +670,497 @@ class SqlUpgradeTests(SqlMigrateBase):
self.upgrade(75)
self.assertTableColumns(config_registration, ['type', 'domain_id'])
- def populate_user_table(self, with_pass_enab=False,
- with_pass_enab_domain=False):
- # Populate the appropriate fields in the user
- # table, depending on the parameters:
- #
- # Default: id, name, extra
- # pass_enab: Add password, enabled as well
- # pass_enab_domain: Add password, enabled and domain as well
- #
- this_table = sqlalchemy.Table("user",
- self.metadata,
- autoload=True)
- for user in default_fixtures.USERS:
- extra = copy.deepcopy(user)
- extra.pop('id')
- extra.pop('name')
-
- if with_pass_enab:
- password = extra.pop('password', None)
- enabled = extra.pop('enabled', True)
- ins = this_table.insert().values(
+ def test_endpoint_filter_upgrade(self):
+ def assert_tables_columns_exist():
+ self.assertTableColumns('project_endpoint',
+ ['endpoint_id', 'project_id'])
+ self.assertTableColumns('endpoint_group',
+ ['id', 'name', 'description', 'filters'])
+ self.assertTableColumns('project_endpoint_group',
+ ['endpoint_group_id', 'project_id'])
+
+ self.assertTableDoesNotExist('project_endpoint')
+ self.upgrade(85)
+ assert_tables_columns_exist()
+
+ @mock.patch.object(migration_helpers, 'get_db_version', return_value=2)
+ def test_endpoint_filter_already_migrated(self, mock_endpoint_filter):
+
+ # By setting the return value to 2, the migration has already been
+ # run, and there's no need to create the table again.
+ self.upgrade(85)
+
+ mock_endpoint_filter.assert_any_call(extension='endpoint_filter',
+ engine=mock.ANY)
+
+ # It won't exist because we are mocking it, but we can verify
+ # that 085 did not create the table.
+ self.assertTableDoesNotExist('project_endpoint')
+ self.assertTableDoesNotExist('endpoint_group')
+ self.assertTableDoesNotExist('project_endpoint_group')
+
+ def test_add_trust_unique_constraint_upgrade(self):
+ self.upgrade(86)
+ inspector = reflection.Inspector.from_engine(self.engine)
+ constraints = inspector.get_unique_constraints('trust')
+ constraint_names = [constraint['name'] for constraint in constraints]
+ self.assertIn('duplicate_trust_constraint', constraint_names)
+
+ def test_add_domain_specific_roles(self):
+ """Check database upgraded successfully for domain specific roles.
+
+ The following items need to be checked:
+
+ - The domain_id column has been added
+ - That it has been added to the uniqueness constraints
+ - Existing roles have their domain_id columns set to the specific
+ string of '<<null>>'
+
+ """
+ NULL_DOMAIN_ID = '<<null>>'
+
+ self.upgrade(87)
+ session = self.Session()
+ role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
+ # Add a role before we upgrade, so we can check that its new domain_id
+ # attribute is handled correctly
+ role_id = uuid.uuid4().hex
+ self.insert_dict(session, 'role',
+ {'id': role_id, 'name': uuid.uuid4().hex})
+ session.close()
+
+ self.upgrade(88)
+
+ session = self.Session()
+ self.metadata.clear()
+ self.assertTableColumns('role', ['id', 'name', 'domain_id', 'extra'])
+ # Check the domain_id has been added to the uniqueness constraint
+ inspector = reflection.Inspector.from_engine(self.engine)
+ constraints = inspector.get_unique_constraints('role')
+ constraint_columns = [
+ constraint['column_names'] for constraint in constraints
+ if constraint['name'] == 'ixu_role_name_domain_id']
+ self.assertIn('domain_id', constraint_columns[0])
+
+ # Now check our role has its domain_id attribute set correctly
+ role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
+ cols = [role_table.c.domain_id]
+ filter = role_table.c.id == role_id
+ statement = sqlalchemy.select(cols).where(filter)
+ role_entry = session.execute(statement).fetchone()
+ self.assertEqual(NULL_DOMAIN_ID, role_entry[0])
+
+ def test_add_root_of_all_domains(self):
+ NULL_DOMAIN_ID = '<<keystone.domain.root>>'
+ self.upgrade(89)
+ session = self.Session()
+
+ domain_table = sqlalchemy.Table(
+ 'domain', self.metadata, autoload=True)
+ query = session.query(domain_table).filter_by(id=NULL_DOMAIN_ID)
+ domain_from_db = query.one()
+ self.assertIn(NULL_DOMAIN_ID, domain_from_db)
+
+ project_table = sqlalchemy.Table(
+ 'project', self.metadata, autoload=True)
+ query = session.query(project_table).filter_by(id=NULL_DOMAIN_ID)
+ project_from_db = query.one()
+ self.assertIn(NULL_DOMAIN_ID, project_from_db)
+
+ session.close()
+
+ def test_add_local_user_and_password_tables(self):
+ local_user_table = 'local_user'
+ password_table = 'password'
+ self.upgrade(89)
+ self.assertTableDoesNotExist(local_user_table)
+ self.assertTableDoesNotExist(password_table)
+ self.upgrade(90)
+ self.assertTableColumns(local_user_table,
+ ['id',
+ 'user_id',
+ 'domain_id',
+ 'name'])
+ self.assertTableColumns(password_table,
+ ['id',
+ 'local_user_id',
+ 'password'])
+
+ def test_migrate_data_to_local_user_and_password_tables(self):
+ def get_expected_users():
+ expected_users = []
+ for test_user in default_fixtures.USERS:
+ user = {}
+ user['id'] = uuid.uuid4().hex
+ user['name'] = test_user['name']
+ user['domain_id'] = test_user['domain_id']
+ user['password'] = test_user['password']
+ user['enabled'] = True
+ user['extra'] = json.dumps(uuid.uuid4().hex)
+ user['default_project_id'] = uuid.uuid4().hex
+ expected_users.append(user)
+ return expected_users
+
+ def add_users_to_db(expected_users, user_table):
+ for user in expected_users:
+ ins = user_table.insert().values(
{'id': user['id'],
'name': user['name'],
- 'password': password,
- 'enabled': bool(enabled),
- 'extra': json.dumps(extra)})
- else:
- if with_pass_enab_domain:
- password = extra.pop('password', None)
- enabled = extra.pop('enabled', True)
- extra.pop('domain_id')
- ins = this_table.insert().values(
- {'id': user['id'],
- 'name': user['name'],
- 'domain_id': user['domain_id'],
- 'password': password,
- 'enabled': bool(enabled),
- 'extra': json.dumps(extra)})
- else:
- ins = this_table.insert().values(
- {'id': user['id'],
- 'name': user['name'],
- 'extra': json.dumps(extra)})
- self.engine.execute(ins)
-
- def populate_tenant_table(self, with_desc_enab=False,
- with_desc_enab_domain=False):
- # Populate the appropriate fields in the tenant or
- # project table, depending on the parameters
- #
- # Default: id, name, extra
- # desc_enab: Add description, enabled as well
- # desc_enab_domain: Add description, enabled and domain as well,
- # plus use project instead of tenant
- #
- if with_desc_enab_domain:
- # By this time tenants are now projects
- this_table = sqlalchemy.Table("project",
- self.metadata,
+ 'domain_id': user['domain_id'],
+ 'password': user['password'],
+ 'enabled': user['enabled'],
+ 'extra': user['extra'],
+ 'default_project_id': user['default_project_id']})
+ ins.execute()
+
+ def get_users_from_db(user_table, local_user_table, password_table):
+ sel = (
+ sqlalchemy.select([user_table.c.id,
+ user_table.c.enabled,
+ user_table.c.extra,
+ user_table.c.default_project_id,
+ local_user_table.c.name,
+ local_user_table.c.domain_id,
+ password_table.c.password])
+ .select_from(user_table.join(local_user_table,
+ user_table.c.id ==
+ local_user_table.c.user_id)
+ .join(password_table,
+ local_user_table.c.id ==
+ password_table.c.local_user_id))
+ )
+ user_rows = sel.execute()
+ users = []
+ for row in user_rows:
+ users.append(
+ {'id': row['id'],
+ 'name': row['name'],
+ 'domain_id': row['domain_id'],
+ 'password': row['password'],
+ 'enabled': row['enabled'],
+ 'extra': row['extra'],
+ 'default_project_id': row['default_project_id']})
+ return users
+
+ meta = sqlalchemy.MetaData()
+ meta.bind = self.engine
+
+ user_table_name = 'user'
+ local_user_table_name = 'local_user'
+ password_table_name = 'password'
+
+ # populate current user table
+ self.upgrade(90)
+ user_table = sqlalchemy.Table(user_table_name, meta, autoload=True)
+ expected_users = get_expected_users()
+ add_users_to_db(expected_users, user_table)
+
+ # upgrade to migration and test
+ self.upgrade(91)
+ self.assertTableCountsMatch(user_table_name, local_user_table_name)
+ self.assertTableCountsMatch(local_user_table_name, password_table_name)
+ meta.clear()
+ user_table = sqlalchemy.Table(user_table_name, meta, autoload=True)
+ local_user_table = sqlalchemy.Table(local_user_table_name, meta,
+ autoload=True)
+ password_table = sqlalchemy.Table(password_table_name, meta,
autoload=True)
+ actual_users = get_users_from_db(user_table, local_user_table,
+ password_table)
+ self.assertListEqual(expected_users, actual_users)
+
+ def test_migrate_user_with_null_password_to_password_tables(self):
+ USER_TABLE_NAME = 'user'
+ LOCAL_USER_TABLE_NAME = 'local_user'
+ PASSWORD_TABLE_NAME = 'password'
+ self.upgrade(90)
+ user_ref = unit.new_user_ref(uuid.uuid4().hex)
+ user_ref.pop('password')
+ # pop the extra attribute, which isn't recognized by the SQL
+ # expression layer.
+ user_ref.pop('email')
+ session = self.Session()
+ self.insert_dict(session, USER_TABLE_NAME, user_ref)
+ self.metadata.clear()
+ self.upgrade(91)
+ # migration should be successful.
+ self.assertTableCountsMatch(USER_TABLE_NAME, LOCAL_USER_TABLE_NAME)
+ # no new entry was added to the password table because the
+ # user doesn't have a password.
+ password_table = self.select_table(PASSWORD_TABLE_NAME)
+ rows = session.execute(password_table.count()).scalar()
+ self.assertEqual(0, rows)
+
+ def test_migrate_user_skip_user_already_exist_in_local_user(self):
+ USER_TABLE_NAME = 'user'
+ LOCAL_USER_TABLE_NAME = 'local_user'
+ self.upgrade(90)
+ user1_ref = unit.new_user_ref(uuid.uuid4().hex)
+ # pop the extra attribute, which isn't recognized by the SQL
+ # expression layer.
+ user1_ref.pop('email')
+ user2_ref = unit.new_user_ref(uuid.uuid4().hex)
+ user2_ref.pop('email')
+ session = self.Session()
+ self.insert_dict(session, USER_TABLE_NAME, user1_ref)
+ self.insert_dict(session, USER_TABLE_NAME, user2_ref)
+ user_id = user1_ref.pop('id')
+ user_name = user1_ref.pop('name')
+ domain_id = user1_ref.pop('domain_id')
+ local_user_ref = {'user_id': user_id, 'name': user_name,
+ 'domain_id': domain_id}
+ self.insert_dict(session, LOCAL_USER_TABLE_NAME, local_user_ref)
+ self.metadata.clear()
+ self.upgrade(91)
+ # migration should be successful and user2_ref has been migrated to
+ # the `local_user` table.
+ self.assertTableCountsMatch(USER_TABLE_NAME, LOCAL_USER_TABLE_NAME)
+
+ def test_implied_roles_fk_on_delete_cascade(self):
+ if self.engine.name == 'sqlite':
+ self.skipTest('sqlite backend does not support foreign keys')
+
+ self.upgrade(92)
+
+ def _create_three_roles():
+ id_list = []
+ for _ in range(3):
+ role = unit.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+ id_list.append(role['id'])
+ return id_list
+
+ role_id_list = _create_three_roles()
+ self.role_api.create_implied_role(role_id_list[0], role_id_list[1])
+ self.role_api.create_implied_role(role_id_list[0], role_id_list[2])
+
+ # assert that there are two roles implied by role 0.
+ implied_roles = self.role_api.list_implied_roles(role_id_list[0])
+ self.assertThat(implied_roles, matchers.HasLength(2))
+
+ self.role_api.delete_role(role_id_list[0])
+ # assert the cascade deletion is effective.
+ implied_roles = self.role_api.list_implied_roles(role_id_list[0])
+ self.assertThat(implied_roles, matchers.HasLength(0))
+
+ def test_domain_as_project_upgrade(self):
+
+ def _populate_domain_and_project_tables(session):
+ # Three domains, with various different attributes
+ self.domains = [{'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True,
+ 'extra': {'description': uuid.uuid4().hex,
+ 'another_attribute': True}},
+ {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': True,
+ 'extra': {'description': uuid.uuid4().hex}},
+ {'id': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex,
+ 'enabled': False}]
+ # Four projects, two top level, two children
+ self.projects = []
+ self.projects.append(unit.new_project_ref(
+ domain_id=self.domains[0]['id'],
+ parent_id=None))
+ self.projects.append(unit.new_project_ref(
+ domain_id=self.domains[0]['id'],
+ parent_id=self.projects[0]['id']))
+ self.projects.append(unit.new_project_ref(
+ domain_id=self.domains[1]['id'],
+ parent_id=None))
+ self.projects.append(unit.new_project_ref(
+ domain_id=self.domains[1]['id'],
+ parent_id=self.projects[2]['id']))
+
+ for domain in self.domains:
+ this_domain = domain.copy()
+ if 'extra' in this_domain:
+ this_domain['extra'] = json.dumps(this_domain['extra'])
+ self.insert_dict(session, 'domain', this_domain)
+ for project in self.projects:
+ self.insert_dict(session, 'project', project)
+
+ def _check_projects(projects):
+
+ def _assert_domain_matches_project(project):
+ for domain in self.domains:
+ if project.id == domain['id']:
+ self.assertEqual(domain['name'], project.name)
+ self.assertEqual(domain['enabled'], project.enabled)
+ if domain['id'] == self.domains[0]['id']:
+ self.assertEqual(domain['extra']['description'],
+ project.description)
+ self.assertEqual({'another_attribute': True},
+ json.loads(project.extra))
+ elif domain['id'] == self.domains[1]['id']:
+ self.assertEqual(domain['extra']['description'],
+ project.description)
+ self.assertEqual({}, json.loads(project.extra))
+
+ # We had the 3 domains we created, which should now be projects
+ # acting as domains. To this we add the 4 original projects, plus
+ # the root-of-all-domains row.
+ self.assertEqual(8, projects.count())
+
+ project_ids = []
+ for project in projects:
+ if project.is_domain:
+ self.assertEqual(NULL_DOMAIN_ID, project.domain_id)
+ self.assertIsNone(project.parent_id)
+ else:
+ self.assertIsNotNone(project.domain_id)
+ self.assertIsNotNone(project.parent_id)
+ project_ids.append(project.id)
+
+ for domain in self.domains:
+ self.assertIn(domain['id'], project_ids)
+ for project in self.projects:
+ self.assertIn(project['id'], project_ids)
+
+ # Now check the attributes of the domains came across OK
+ for project in projects:
+ _assert_domain_matches_project(project)
+
+ NULL_DOMAIN_ID = '<<keystone.domain.root>>'
+ self.upgrade(92)
+
+ session = self.Session()
+
+ _populate_domain_and_project_tables(session)
+
+ self.upgrade(93)
+ proj_table = sqlalchemy.Table('project', self.metadata, autoload=True)
+
+ projects = session.query(proj_table)
+ _check_projects(projects)
+
+ def test_add_federated_user_table(self):
+ federated_user_table = 'federated_user'
+ self.upgrade(93)
+ self.assertTableDoesNotExist(federated_user_table)
+ self.upgrade(94)
+ self.assertTableColumns(federated_user_table,
+ ['id',
+ 'user_id',
+ 'idp_id',
+ 'protocol_id',
+ 'unique_id',
+ 'display_name'])
+
+ def test_add_int_pkey_to_revocation_event_table(self):
+ meta = sqlalchemy.MetaData()
+ meta.bind = self.engine
+ REVOCATION_EVENT_TABLE_NAME = 'revocation_event'
+ self.upgrade(94)
+ revocation_event_table = sqlalchemy.Table(REVOCATION_EVENT_TABLE_NAME,
+ meta, autoload=True)
+ # assert id column is a string (before)
+ self.assertEqual('VARCHAR(64)', str(revocation_event_table.c.id.type))
+ self.upgrade(95)
+ meta.clear()
+ revocation_event_table = sqlalchemy.Table(REVOCATION_EVENT_TABLE_NAME,
+ meta, autoload=True)
+ # assert id column is an integer (after)
+ self.assertEqual('INTEGER', str(revocation_event_table.c.id.type))
+
+ def _add_unique_constraint_to_role_name(self,
+ constraint_name='ixu_role_name'):
+ meta = sqlalchemy.MetaData()
+ meta.bind = self.engine
+ role_table = sqlalchemy.Table('role', meta, autoload=True)
+ migrate.UniqueConstraint(role_table.c.name,
+ name=constraint_name).create()
+
+ def _drop_unique_constraint_to_role_name(self,
+ constraint_name='ixu_role_name'):
+ role_table = sqlalchemy.Table('role', self.metadata, autoload=True)
+ migrate.UniqueConstraint(role_table.c.name,
+ name=constraint_name).drop()
+
+ def test_migration_88_drops_unique_constraint(self):
+ self.upgrade(87)
+ if self.engine.name == 'mysql':
+ self.assertTrue(self.does_index_exist('role', 'ixu_role_name'))
else:
- this_table = sqlalchemy.Table("tenant",
- self.metadata,
- autoload=True)
+ self.assertTrue(self.does_constraint_exist('role',
+ 'ixu_role_name'))
+ self.upgrade(88)
+ if self.engine.name == 'mysql':
+ self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
+ else:
+ self.assertFalse(self.does_constraint_exist('role',
+ 'ixu_role_name'))
- for tenant in default_fixtures.TENANTS:
- extra = copy.deepcopy(tenant)
- extra.pop('id')
- extra.pop('name')
-
- if with_desc_enab:
- desc = extra.pop('description', None)
- enabled = extra.pop('enabled', True)
- ins = this_table.insert().values(
- {'id': tenant['id'],
- 'name': tenant['name'],
- 'description': desc,
- 'enabled': bool(enabled),
- 'extra': json.dumps(extra)})
- else:
- if with_desc_enab_domain:
- desc = extra.pop('description', None)
- enabled = extra.pop('enabled', True)
- extra.pop('domain_id')
- ins = this_table.insert().values(
- {'id': tenant['id'],
- 'name': tenant['name'],
- 'domain_id': tenant['domain_id'],
- 'description': desc,
- 'enabled': bool(enabled),
- 'extra': json.dumps(extra)})
- else:
- ins = this_table.insert().values(
- {'id': tenant['id'],
- 'name': tenant['name'],
- 'extra': json.dumps(extra)})
- self.engine.execute(ins)
-
- def _mysql_check_all_tables_innodb(self):
- database = self.engine.url.database
-
- connection = self.engine.connect()
- # sanity check
- total = connection.execute("SELECT count(*) "
- "from information_schema.TABLES "
- "where TABLE_SCHEMA='%(database)s'" %
- dict(database=database))
- self.assertTrue(total.scalar() > 0, "No tables found. Wrong schema?")
-
- noninnodb = connection.execute("SELECT table_name "
- "from information_schema.TABLES "
- "where TABLE_SCHEMA='%(database)s' "
- "and ENGINE!='InnoDB' "
- "and TABLE_NAME!='migrate_version'" %
- dict(database=database))
- names = [x[0] for x in noninnodb]
- self.assertEqual([], names,
- "Non-InnoDB tables exist")
-
- connection.close()
+ def test_migration_88_inconsistent_constraint_name(self):
+ self.upgrade(87)
+ self._drop_unique_constraint_to_role_name()
+
+ constraint_name = uuid.uuid4().hex
+ self._add_unique_constraint_to_role_name(
+ constraint_name=constraint_name)
+
+ if self.engine.name == 'mysql':
+ self.assertTrue(self.does_index_exist('role', constraint_name))
+ self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
+ else:
+ self.assertTrue(self.does_constraint_exist('role',
+ constraint_name))
+ self.assertFalse(self.does_constraint_exist('role',
+ 'ixu_role_name'))
+
+ self.upgrade(88)
+ if self.engine.name == 'mysql':
+ self.assertFalse(self.does_index_exist('role', constraint_name))
+ self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
+ else:
+ self.assertFalse(self.does_constraint_exist('role',
+ constraint_name))
+ self.assertFalse(self.does_constraint_exist('role',
+ 'ixu_role_name'))
+
+ def test_migration_96(self):
+ self.upgrade(95)
+ if self.engine.name == 'mysql':
+ self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
+ else:
+ self.assertFalse(self.does_constraint_exist('role',
+ 'ixu_role_name'))
+
+ self.upgrade(96)
+ if self.engine.name == 'mysql':
+ self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
+ else:
+ self.assertFalse(self.does_constraint_exist('role',
+ 'ixu_role_name'))
+
+ def test_migration_96_constraint_exists(self):
+ self.upgrade(95)
+ self._add_unique_constraint_to_role_name()
+
+ if self.engine.name == 'mysql':
+ self.assertTrue(self.does_index_exist('role', 'ixu_role_name'))
+ else:
+ self.assertTrue(self.does_constraint_exist('role',
+ 'ixu_role_name'))
+
+ self.upgrade(96)
+ if self.engine.name == 'mysql':
+ self.assertFalse(self.does_index_exist('role', 'ixu_role_name'))
+ else:
+ self.assertFalse(self.does_constraint_exist('role',
+ 'ixu_role_name'))
class VersionTests(SqlMigrateBase):
- _initial_db_version = migrate_repo.DB_INIT_VERSION
+ _initial_db_version = migration_helpers.get_init_version()
def test_core_initial(self):
"""Get the version before migrated, it's the initial DB version."""
version = migration_helpers.get_db_version()
- self.assertEqual(migrate_repo.DB_INIT_VERSION, version)
+ self.assertEqual(self._initial_db_version, version)
def test_core_max(self):
"""When get the version after upgrading, it's the new version."""
@@ -793,97 +1181,15 @@ class VersionTests(SqlMigrateBase):
migration_helpers.get_db_version,
extension='federation')
- def test_extension_initial(self):
- """When get the initial version of an extension, it's 0."""
- for name, extension in EXTENSIONS.items():
- abs_path = migration_helpers.find_migrate_repo(extension)
- migration.db_version_control(sql.get_engine(), abs_path)
- version = migration_helpers.get_db_version(extension=name)
- self.assertEqual(0, version,
- 'Migrate version for %s is not 0' % name)
-
- def test_extension_migrated(self):
- """When get the version after migrating an extension, it's not 0."""
- for name, extension in EXTENSIONS.items():
- abs_path = migration_helpers.find_migrate_repo(extension)
- migration.db_version_control(sql.get_engine(), abs_path)
- migration.db_sync(sql.get_engine(), abs_path)
- version = migration_helpers.get_db_version(extension=name)
- self.assertTrue(
- version > 0,
- "Version for %s didn't change after migrated?" % name)
- # Verify downgrades cannot occur
- self.assertRaises(
- db_exception.DbMigrationError,
- migration_helpers._sync_extension_repo,
- extension=name,
- version=0)
-
- def test_extension_federation_upgraded_values(self):
- abs_path = migration_helpers.find_migrate_repo(federation)
- migration.db_version_control(sql.get_engine(), abs_path)
- migration.db_sync(sql.get_engine(), abs_path, version=6)
- idp_table = sqlalchemy.Table("identity_provider",
- self.metadata,
- autoload=True)
- idps = [{'id': uuid.uuid4().hex,
- 'enabled': True,
- 'description': uuid.uuid4().hex,
- 'remote_id': uuid.uuid4().hex},
- {'id': uuid.uuid4().hex,
- 'enabled': True,
- 'description': uuid.uuid4().hex,
- 'remote_id': uuid.uuid4().hex}]
- for idp in idps:
- ins = idp_table.insert().values({'id': idp['id'],
- 'enabled': idp['enabled'],
- 'description': idp['description'],
- 'remote_id': idp['remote_id']})
- self.engine.execute(ins)
- migration.db_sync(sql.get_engine(), abs_path)
- idp_remote_ids_table = sqlalchemy.Table("idp_remote_ids",
- self.metadata,
- autoload=True)
- for idp in idps:
- s = idp_remote_ids_table.select().where(
- idp_remote_ids_table.c.idp_id == idp['id'])
- remote = self.engine.execute(s).fetchone()
- self.assertEqual(idp['remote_id'],
- remote['remote_id'],
- 'remote_ids must be preserved during the '
- 'migration from identity_provider table to '
- 'idp_remote_ids table')
-
def test_unexpected_extension(self):
- """The version for an extension that doesn't exist raises ImportError.
-
- """
-
+ """The version for a non-existent extension raises ImportError."""
extension_name = uuid.uuid4().hex
self.assertRaises(ImportError,
migration_helpers.get_db_version,
extension=extension_name)
def test_unversioned_extension(self):
- """The version for extensions without migrations raise an exception.
-
- """
-
+ """The version for extensions without migrations raise an exception."""
self.assertRaises(exception.MigrationNotProvided,
migration_helpers.get_db_version,
extension='admin_crud')
-
- def test_initial_with_extension_version_None(self):
- """When performing a default migration, also migrate extensions."""
- migration_helpers.sync_database_to_version(extension=None,
- version=None)
- for table in INITIAL_EXTENSION_TABLE_STRUCTURE:
- self.assertTableColumns(table,
- INITIAL_EXTENSION_TABLE_STRUCTURE[table])
-
- def test_initial_with_extension_version_max(self):
- """When migrating to max version, do not migrate extensions."""
- migration_helpers.sync_database_to_version(extension=None,
- version=self.max_version)
- for table in INITIAL_EXTENSION_TABLE_STRUCTURE:
- self.assertTableDoesNotExist(table)
diff --git a/keystone-moon/keystone/tests/unit/test_token_provider.py b/keystone-moon/keystone/tests/unit/test_token_provider.py
index f60f7d53..5c71363b 100644
--- a/keystone-moon/keystone/tests/unit/test_token_provider.py
+++ b/keystone-moon/keystone/tests/unit/test_token_provider.py
@@ -16,6 +16,7 @@ import datetime
from oslo_config import cfg
from oslo_utils import timeutils
+from six.moves import reload_module
from keystone.common import dependency
from keystone.common import utils
@@ -781,6 +782,12 @@ class TestTokenProvider(unit.TestCase):
self.assertIsNone(
self.token_provider_api._is_valid_token(create_v3_token()))
+ def test_no_token_raises_token_not_found(self):
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ None)
+
# NOTE(ayoung): renamed to avoid automatic test detection
class PKIProviderTests(object):
@@ -803,7 +810,8 @@ class PKIProviderTests(object):
self.cms.subprocess = self.target_subprocess
self.environment.subprocess = self.target_subprocess
- reload(pki) # force module reload so the imports get re-evaluated
+ # force module reload so the imports get re-evaluated
+ reload_module(pki)
def test_get_token_id_error_handling(self):
# cause command-line failure
diff --git a/keystone-moon/keystone/tests/unit/test_url_middleware.py b/keystone-moon/keystone/tests/unit/test_url_middleware.py
index 217b302d..3b160b93 100644
--- a/keystone-moon/keystone/tests/unit/test_url_middleware.py
+++ b/keystone-moon/keystone/tests/unit/test_url_middleware.py
@@ -20,6 +20,7 @@ from keystone.tests import unit
class FakeApp(object):
"""Fakes a WSGI app URL normalized."""
+
def __call__(self, env, start_response):
resp = webob.Response()
resp.body = 'SUCCESS'
diff --git a/keystone-moon/keystone/tests/unit/test_v2.py b/keystone-moon/keystone/tests/unit/test_v2.py
index acdfca5f..e81c6040 100644
--- a/keystone-moon/keystone/tests/unit/test_v2.py
+++ b/keystone-moon/keystone/tests/unit/test_v2.py
@@ -23,9 +23,11 @@ from six.moves import http_client
from testtools import matchers
from keystone.common import extension as keystone_extension
+from keystone.tests import unit
+from keystone.tests.unit import default_fixtures
from keystone.tests.unit import ksfixtures
from keystone.tests.unit import rest
-
+from keystone.tests.unit.schema import v2
CONF = cfg.CONF
@@ -106,11 +108,11 @@ class CoreApiTests(object):
self.assertValidExtensionListResponse(
r, keystone_extension.ADMIN_EXTENSIONS)
- def test_admin_extensions_404(self):
+ def test_admin_extensions_returns_not_found(self):
self.admin_request(path='/v2.0/extensions/invalid-extension',
expected_status=http_client.NOT_FOUND)
- def test_public_osksadm_extension_404(self):
+ def test_public_osksadm_extension_returns_not_found(self):
self.public_request(path='/v2.0/extensions/OS-KSADM',
expected_status=http_client.NOT_FOUND)
@@ -132,7 +134,7 @@ class CoreApiTests(object):
'tenantId': self.tenant_bar['id'],
},
},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidAuthenticationResponse(r, require_service_catalog=True)
def test_authenticate_unscoped(self):
@@ -147,7 +149,7 @@ class CoreApiTests(object):
},
},
},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidAuthenticationResponse(r)
def test_get_tenants_for_token(self):
@@ -164,7 +166,7 @@ class CoreApiTests(object):
token=token)
self.assertValidAuthenticationResponse(r)
- def test_invalid_token_404(self):
+ def test_invalid_token_returns_not_found(self):
token = self.get_scoped_token()
self.admin_request(
path='/v2.0/tokens/%(token_id)s' % {
@@ -179,7 +181,8 @@ class CoreApiTests(object):
self.tenant_service['id'],
self.role_service['id'])
- token = self.get_scoped_token(tenant_id='service')
+ token = self.get_scoped_token(
+ tenant_id=default_fixtures.SERVICE_TENANT_ID)
r = self.admin_request(
path='/v2.0/tokens/%s' % token,
token=token)
@@ -191,7 +194,8 @@ class CoreApiTests(object):
self.tenant_service['id'],
self.role_service['id'])
- token = self.get_scoped_token(tenant_id='service')
+ token = self.get_scoped_token(
+ tenant_id=default_fixtures.SERVICE_TENANT_ID)
r = self.admin_request(
path='/v2.0/tokens/%s' % token,
token=token)
@@ -234,7 +238,7 @@ class CoreApiTests(object):
'token_id': token,
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
def test_endpoints(self):
token = self.get_scoped_token()
@@ -273,6 +277,14 @@ class CoreApiTests(object):
token=token)
self.assertValidRoleListResponse(r)
+ def test_get_user_roles_without_tenant(self):
+ token = self.get_scoped_token()
+ self.admin_request(
+ path='/v2.0/users/%(user_id)s/roles' % {
+ 'user_id': self.user_foo['id'],
+ },
+ token=token, expected_status=http_client.NOT_IMPLEMENTED)
+
def test_get_user(self):
token = self.get_scoped_token()
r = self.admin_request(
@@ -370,7 +382,7 @@ class CoreApiTests(object):
},
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
def test_error_response(self):
"""This triggers assertValidErrorResponse by convention."""
@@ -459,7 +471,7 @@ class CoreApiTests(object):
},
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
user_id = self._get_user_id(r.result)
@@ -470,7 +482,7 @@ class CoreApiTests(object):
'user_id': user_id
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
self.assertEqual(CONF.member_role_name, self._get_role_name(r.result))
# Create a new tenant
@@ -485,7 +497,7 @@ class CoreApiTests(object):
},
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
project_id = self._get_project_id(r.result)
@@ -501,7 +513,7 @@ class CoreApiTests(object):
},
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
# 'member_role' should be in new_tenant
r = self.admin_request(
@@ -510,7 +522,7 @@ class CoreApiTests(object):
'user_id': user_id
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
self.assertEqual('_member_', self._get_role_name(r.result))
# 'member_role' should not be in tenant_bar any more
@@ -520,7 +532,7 @@ class CoreApiTests(object):
'user_id': user_id
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
self.assertNoRoles(r.result)
def test_update_user_with_invalid_tenant(self):
@@ -539,7 +551,7 @@ class CoreApiTests(object):
},
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
user_id = self._get_user_id(r.result)
# Update user with an invalid tenant
@@ -571,7 +583,7 @@ class CoreApiTests(object):
},
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
user_id = self._get_user_id(r.result)
# Update user with an invalid tenant
@@ -604,7 +616,7 @@ class CoreApiTests(object):
},
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
user_id = self._get_user_id(r.result)
@@ -615,7 +627,7 @@ class CoreApiTests(object):
'user_id': user_id
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
self.assertEqual(CONF.member_role_name, self._get_role_name(r.result))
# Update user's tenant with old tenant id
@@ -630,7 +642,7 @@ class CoreApiTests(object):
},
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
# 'member_role' should still be in tenant_bar
r = self.admin_request(
@@ -639,7 +651,7 @@ class CoreApiTests(object):
'user_id': user_id
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
self.assertEqual('_member_', self._get_role_name(r.result))
def test_authenticating_a_user_with_no_password(self):
@@ -721,7 +733,7 @@ class LegacyV2UsernameTests(object):
path='/v2.0/users',
token=token,
body=body,
- expected_status=200)
+ expected_status=http_client.OK)
def test_create_with_extra_username(self):
"""The response for creating a user will contain the extra fields."""
@@ -772,7 +784,7 @@ class LegacyV2UsernameTests(object):
'enabled': enabled,
},
},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidUserResponse(r)
@@ -802,7 +814,7 @@ class LegacyV2UsernameTests(object):
'enabled': enabled,
},
},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidUserResponse(r)
@@ -881,7 +893,7 @@ class LegacyV2UsernameTests(object):
'enabled': enabled,
},
},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidUserResponse(r)
@@ -911,7 +923,7 @@ class LegacyV2UsernameTests(object):
'enabled': enabled,
},
},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidUserResponse(r)
@@ -931,7 +943,7 @@ class LegacyV2UsernameTests(object):
'enabled': True,
},
},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidUserResponse(r)
@@ -956,7 +968,7 @@ class LegacyV2UsernameTests(object):
'enabled': enabled,
},
},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidUserResponse(r)
@@ -979,6 +991,14 @@ class RestfulTestCase(rest.RestfulTestCase):
class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
+
+ def config_overrides(self):
+ super(V2TestCase, self).config_overrides()
+ self.config_fixture.config(
+ group='catalog',
+ driver='templated',
+ template_file=unit.dirs.tests('default_catalog.templates'))
+
def _get_user_id(self, r):
return r['user']['id']
@@ -1200,7 +1220,7 @@ class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
method='GET',
path='/v2.0/tokens/revoked',
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidRevocationListResponse(r)
def assertValidRevocationListResponse(self, response):
@@ -1231,7 +1251,7 @@ class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
method='GET',
path='/v2.0/tokens/revoked',
token=token1,
- expected_status=200)
+ expected_status=http_client.OK)
signed_text = r.result['signed']
data_json = cms.cms_verify(signed_text, CONF.signing.certfile,
@@ -1242,10 +1262,11 @@ class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
return (data, token2)
def test_fetch_revocation_list_md5(self):
- """If the server is configured for md5, then the revocation list has
- tokens hashed with MD5.
- """
+ """Hash for tokens in revocation list and server config should match.
+ If the server is configured for md5, then the revocation list has
+ tokens hashed with MD5.
+ """
# The default hash algorithm is md5.
hash_algorithm = 'md5'
@@ -1254,10 +1275,11 @@ class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
self.assertThat(token_hash, matchers.Equals(data['revoked'][0]['id']))
def test_fetch_revocation_list_sha256(self):
- """If the server is configured for sha256, then the revocation list has
- tokens hashed with SHA256
- """
+ """Hash for tokens in revocation list and server config should match.
+ If the server is configured for sha256, then the revocation list has
+ tokens hashed with SHA256.
+ """
hash_algorithm = 'sha256'
self.config_fixture.config(group='token',
hash_algorithm=hash_algorithm)
@@ -1333,7 +1355,7 @@ class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
},
},
},
- expected_status=200)
+ expected_status=http_client.OK)
# ensure password doesn't leak
user_id = r.result['user']['id']
@@ -1341,7 +1363,7 @@ class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
method='GET',
path='/v2.0/users/%s' % user_id,
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
self.assertNotIn('OS-KSADM:password', r.result['user'])
def test_updating_a_user_with_an_OSKSADM_password(self):
@@ -1360,7 +1382,7 @@ class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
},
},
token=token,
- expected_status=200)
+ expected_status=http_client.OK)
# successfully authenticate
self.public_request(
@@ -1374,13 +1396,12 @@ class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests):
},
},
},
- expected_status=200)
+ expected_status=http_client.OK)
class RevokeApiTestCase(V2TestCase):
def config_overrides(self):
super(RevokeApiTestCase, self).config_overrides()
- self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='token',
provider='pki',
@@ -1402,6 +1423,27 @@ class TestFernetTokenProviderV2(RestfulTestCase):
super(TestFernetTokenProviderV2, self).setUp()
self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+ # Add catalog data
+ self.region = unit.new_region_ref()
+ self.region_id = self.region['id']
+ self.catalog_api.create_region(self.region)
+
+ self.service = unit.new_service_ref()
+ self.service_id = self.service['id']
+ self.catalog_api.create_service(self.service_id, self.service)
+
+ self.endpoint = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id)
+ self.endpoint_id = self.endpoint['id']
+ self.catalog_api.create_endpoint(self.endpoint_id, self.endpoint)
+
+ def assertValidUnscopedTokenResponse(self, r):
+ v2.unscoped_validator.validate(r.json['access'])
+
+ def assertValidScopedTokenResponse(self, r):
+ v2.scoped_validator.validate(r.json['access'])
+
# Used by RestfulTestCase
def _get_token_id(self, r):
return r.result['access']['token']['id']
@@ -1432,11 +1474,12 @@ class TestFernetTokenProviderV2(RestfulTestCase):
admin_token = self.get_scoped_token(tenant_id=project_ref['id'])
unscoped_token = self.get_unscoped_token()
path = ('/v2.0/tokens/%s' % unscoped_token)
- self.admin_request(
+ resp = self.admin_request(
method='GET',
path=path,
token=admin_token,
- expected_status=200)
+ expected_status=http_client.OK)
+ self.assertValidUnscopedTokenResponse(resp)
def test_authenticate_scoped_token(self):
project_ref = self.new_project_ref()
@@ -1462,11 +1505,12 @@ class TestFernetTokenProviderV2(RestfulTestCase):
path = ('/v2.0/tokens/%s?belongsTo=%s' % (member_token,
project2_ref['id']))
# Validate token belongs to project
- self.admin_request(
+ resp = self.admin_request(
method='GET',
path=path,
token=admin_token,
- expected_status=200)
+ expected_status=http_client.OK)
+ self.assertValidScopedTokenResponse(resp)
def test_token_authentication_and_validation(self):
"""Test token authentication for Fernet token provider.
@@ -1491,16 +1535,17 @@ class TestFernetTokenProviderV2(RestfulTestCase):
}
}
},
- expected_status=200)
+ expected_status=http_client.OK)
token_id = self._get_token_id(r)
path = ('/v2.0/tokens/%s?belongsTo=%s' % (token_id, project_ref['id']))
# Validate token belongs to project
- self.admin_request(
+ resp = self.admin_request(
method='GET',
path=path,
- token=CONF.admin_token,
- expected_status=200)
+ token=self.get_admin_token(),
+ expected_status=http_client.OK)
+ self.assertValidScopedTokenResponse(resp)
def test_rescoped_tokens_maintain_original_expiration(self):
project_ref = self.new_project_ref()
@@ -1522,7 +1567,7 @@ class TestFernetTokenProviderV2(RestfulTestCase):
},
# NOTE(lbragstad): This test may need to be refactored if Keystone
# decides to disallow rescoping using a scoped token.
- expected_status=200)
+ expected_status=http_client.OK)
original_token = resp.result['access']['token']['id']
original_expiration = resp.result['access']['token']['expires']
@@ -1537,8 +1582,9 @@ class TestFernetTokenProviderV2(RestfulTestCase):
}
}
},
- expected_status=200)
+ expected_status=http_client.OK)
rescoped_token = resp.result['access']['token']['id']
rescoped_expiration = resp.result['access']['token']['expires']
self.assertNotEqual(original_token, rescoped_token)
self.assertEqual(original_expiration, rescoped_expiration)
+ self.assertValidScopedTokenResponse(resp)
diff --git a/keystone-moon/keystone/tests/unit/test_v2_controller.py b/keystone-moon/keystone/tests/unit/test_v2_controller.py
index 581e6b9c..6cf8bc53 100644
--- a/keystone-moon/keystone/tests/unit/test_v2_controller.py
+++ b/keystone-moon/keystone/tests/unit/test_v2_controller.py
@@ -13,8 +13,11 @@
# under the License.
+import copy
import uuid
+from testtools import matchers
+
from keystone.assignment import controllers as assignment_controllers
from keystone import exception
from keystone.resource import controllers as resource_controllers
@@ -32,6 +35,7 @@ class TenantTestCase(unit.TestCase):
These tests exercise :class:`keystone.assignment.controllers.Tenant`.
"""
+
def setUp(self):
super(TenantTestCase, self).setUp()
self.useFixture(database.Database())
@@ -73,17 +77,18 @@ class TenantTestCase(unit.TestCase):
def test_list_projects_default_domain(self):
"""Test that list projects only returns those in the default domain."""
-
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'enabled': True}
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain['id']}
+ project1 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project1['id'], project1)
- # Check the real total number of projects, we should have the above
- # plus those in the default features
+ # Check the real total number of projects; we should have:
+ # - tenants in the default fixtures
+ # - the project representing the default domain
+ # - the project representing the domain we created above
+ # - the project we created above
refs = self.resource_api.list_projects()
- self.assertEqual(len(default_fixtures.TENANTS) + 1, len(refs))
+ self.assertThat(
+ refs, matchers.HasLength(len(default_fixtures.TENANTS) + 3))
# Now list all projects using the v2 API - we should only get
# back those in the default features, since only those are in the
@@ -98,11 +103,52 @@ class TenantTestCase(unit.TestCase):
self.assertIn(tenant_copy, refs['tenants'])
def _create_is_domain_project(self):
- project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': 'default', 'is_domain': True}
+ project = unit.new_project_ref(is_domain=True)
project_ref = self.resource_api.create_project(project['id'], project)
return self.tenant_controller.v3_to_v2_project(project_ref)
+ def test_get_is_domain_project_not_found(self):
+ """Test that get project does not return is_domain projects."""
+ project = self._create_is_domain_project()
+
+ context = copy.deepcopy(_ADMIN_CONTEXT)
+ context['query_string']['name'] = project['name']
+
+ self.assertRaises(
+ exception.ProjectNotFound,
+ self.tenant_controller.get_all_projects,
+ context)
+
+ context = copy.deepcopy(_ADMIN_CONTEXT)
+ context['query_string']['name'] = project['id']
+
+ self.assertRaises(
+ exception.ProjectNotFound,
+ self.tenant_controller.get_all_projects,
+ context)
+
+ def test_create_is_domain_project_fails(self):
+ """Test that the creation of a project acting as a domain fails."""
+ project = {'name': uuid.uuid4().hex, 'domain_id': 'default',
+ 'is_domain': True}
+
+ self.assertRaises(
+ exception.ValidationError,
+ self.tenant_controller.create_project,
+ _ADMIN_CONTEXT,
+ project)
+
+ def test_create_project_passing_is_domain_false_fails(self):
+ """Test that passing is_domain=False is not allowed."""
+ project = {'name': uuid.uuid4().hex, 'domain_id': 'default',
+ 'is_domain': False}
+
+ self.assertRaises(
+ exception.ValidationError,
+ self.tenant_controller.create_project,
+ _ADMIN_CONTEXT,
+ project)
+
def test_update_is_domain_project_not_found(self):
"""Test that update is_domain project is not allowed in v2."""
project = self._create_is_domain_project()
@@ -113,8 +159,7 @@ class TenantTestCase(unit.TestCase):
self.tenant_controller.update_project,
_ADMIN_CONTEXT,
project['id'],
- project
- )
+ project)
def test_delete_is_domain_project_not_found(self):
"""Test that delete is_domain project is not allowed in v2."""
@@ -124,14 +169,12 @@ class TenantTestCase(unit.TestCase):
exception.ProjectNotFound,
self.tenant_controller.delete_project,
_ADMIN_CONTEXT,
- project['id']
- )
+ project['id'])
def test_list_is_domain_project_not_found(self):
"""Test v2 get_all_projects having projects that act as a domain.
- In v2 no project with the is_domain flag enabled should be
- returned.
+ In v2 no project with the is_domain flag enabled should be returned.
"""
project1 = self._create_is_domain_project()
project2 = self._create_is_domain_project()
diff --git a/keystone-moon/keystone/tests/unit/test_v3.py b/keystone-moon/keystone/tests/unit/test_v3.py
index 32c5e295..216d8c79 100644
--- a/keystone-moon/keystone/tests/unit/test_v3.py
+++ b/keystone-moon/keystone/tests/unit/test_v3.py
@@ -12,20 +12,25 @@
# License for the specific language governing permissions and limitations
# under the License.
-import datetime
import uuid
+import mock
from oslo_config import cfg
+import oslo_context.context
from oslo_serialization import jsonutils
from oslo_utils import timeutils
+from six.moves import http_client
from testtools import matchers
+import webtest
from keystone import auth
from keystone.common import authorization
from keystone.common import cache
+from keystone.common.validation import validators
from keystone import exception
from keystone import middleware
-from keystone.policy.backends import rules
+from keystone.middleware import auth as middleware_auth
+from keystone.tests.common import auth as common_auth
from keystone.tests import unit
from keystone.tests.unit import rest
@@ -38,6 +43,7 @@ TIME_FORMAT = unit.TIME_FORMAT
class AuthTestMixin(object):
"""To hold auth building helper functions."""
+
def build_auth_scope(self, project_id=None, project_name=None,
project_domain_id=None, project_domain_name=None,
domain_id=None, domain_name=None, trust_id=None,
@@ -116,7 +122,127 @@ class AuthTestMixin(object):
class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
- AuthTestMixin):
+ common_auth.AuthTestMixin):
+
+ def generate_token_schema(self, domain_scoped=False, project_scoped=False):
+ """Return a dictionary of token properties to validate against."""
+ properties = {
+ 'audit_ids': {
+ 'type': 'array',
+ 'items': {
+ 'type': 'string',
+ },
+ 'minItems': 1,
+ 'maxItems': 2,
+ },
+ 'bind': {
+ 'type': 'object',
+ 'properties': {
+ 'kerberos': {
+ 'type': 'string',
+ },
+ },
+ 'required': ['kerberos'],
+ 'additionalProperties': False,
+ },
+ 'expires_at': {'type': 'string'},
+ 'issued_at': {'type': 'string'},
+ 'methods': {
+ 'type': 'array',
+ 'items': {
+ 'type': 'string',
+ },
+ },
+ 'user': {
+ 'type': 'object',
+ 'required': ['id', 'name', 'domain'],
+ 'properties': {
+ 'id': {'type': 'string'},
+ 'name': {'type': 'string'},
+ 'domain': {
+ 'type': 'object',
+ 'properties': {
+ 'id': {'type': 'string'},
+ 'name': {'type': 'string'}
+ },
+ 'required': ['id', 'name'],
+                            'additionalProperties': False,
+ }
+ },
+ 'additionalProperties': False,
+ }
+ }
+
+ if domain_scoped:
+ properties['catalog'] = {'type': 'array'}
+ properties['roles'] = {
+ 'type': 'array',
+ 'items': {
+ 'type': 'object',
+ 'properties': {
+ 'id': {'type': 'string', },
+ 'name': {'type': 'string', },
+ },
+ 'required': ['id', 'name', ],
+ 'additionalProperties': False,
+ },
+ 'minItems': 1,
+ }
+ properties['domain'] = {
+ 'domain': {
+ 'type': 'object',
+ 'required': ['id', 'name'],
+ 'properties': {
+ 'id': {'type': 'string'},
+ 'name': {'type': 'string'}
+ },
+ 'additionalProperties': False
+ }
+ }
+ elif project_scoped:
+ properties['is_admin_project'] = {'type': 'boolean'}
+ properties['catalog'] = {'type': 'array'}
+ properties['roles'] = {'type': 'array'}
+ properties['project'] = {
+ 'type': ['object'],
+ 'required': ['id', 'name', 'domain'],
+ 'properties': {
+ 'id': {'type': 'string'},
+ 'name': {'type': 'string'},
+ 'domain': {
+ 'type': ['object'],
+ 'required': ['id', 'name'],
+ 'properties': {
+ 'id': {'type': 'string'},
+ 'name': {'type': 'string'}
+ },
+ 'additionalProperties': False
+ }
+ },
+ 'additionalProperties': False
+ }
+
+ schema = {
+ 'type': 'object',
+ 'properties': properties,
+ 'required': ['audit_ids', 'expires_at', 'issued_at', 'methods',
+ 'user'],
+ 'optional': ['bind'],
+ 'additionalProperties': False
+ }
+
+ if domain_scoped:
+ schema['required'].extend(['domain', 'roles'])
+ schema['optional'].append('catalog')
+ elif project_scoped:
+ schema['required'].append('project')
+ schema['optional'].append('bind')
+ schema['optional'].append('catalog')
+ schema['optional'].append('OS-TRUST:trust')
+ schema['optional'].append('is_admin_project')
+
+ return schema
+
def config_files(self):
config_files = super(RestfulTestCase, self).config_files()
config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
@@ -146,9 +272,7 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
pass
def setUp(self, app_conf='keystone'):
- """Setup for v3 Restful Test Cases.
-
- """
+ """Setup for v3 Restful Test Cases."""
new_paste_file = self.generate_paste_config()
self.addCleanup(self.remove_generated_paste_config)
if new_paste_file:
@@ -158,16 +282,9 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
self.empty_context = {'environment': {}}
- # Initialize the policy engine and allow us to write to a temp
- # file in each test to create the policies
- rules.reset()
-
- # drop the policy rules
- self.addCleanup(rules.reset)
-
def load_backends(self):
# ensure the cache region instance is setup
- cache.configure_cache_region(cache.REGION)
+ cache.configure_cache()
super(RestfulTestCase, self).load_backends()
@@ -183,53 +300,42 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
try:
self.resource_api.get_domain(DEFAULT_DOMAIN_ID)
except exception.DomainNotFound:
- domain = {'description': (u'Owns users and tenants (i.e. '
- u'projects) available on Identity '
- u'API v2.'),
- 'enabled': True,
- 'id': DEFAULT_DOMAIN_ID,
- 'name': u'Default'}
+ domain = unit.new_domain_ref(
+ description=(u'The default domain'),
+ id=DEFAULT_DOMAIN_ID,
+ name=u'Default')
self.resource_api.create_domain(DEFAULT_DOMAIN_ID, domain)
def load_sample_data(self):
self._populate_default_domain()
- self.domain_id = uuid.uuid4().hex
- self.domain = self.new_domain_ref()
- self.domain['id'] = self.domain_id
+ self.domain = unit.new_domain_ref()
+ self.domain_id = self.domain['id']
self.resource_api.create_domain(self.domain_id, self.domain)
- self.project_id = uuid.uuid4().hex
- self.project = self.new_project_ref(
- domain_id=self.domain_id)
- self.project['id'] = self.project_id
- self.resource_api.create_project(self.project_id, self.project)
+ self.project = unit.new_project_ref(domain_id=self.domain_id)
+ self.project_id = self.project['id']
+ self.project = self.resource_api.create_project(self.project_id,
+ self.project)
- self.user = self.new_user_ref(domain_id=self.domain_id)
- password = self.user['password']
- self.user = self.identity_api.create_user(self.user)
- self.user['password'] = password
+ self.user = unit.create_user(self.identity_api,
+ domain_id=self.domain_id)
self.user_id = self.user['id']
self.default_domain_project_id = uuid.uuid4().hex
- self.default_domain_project = self.new_project_ref(
+ self.default_domain_project = unit.new_project_ref(
domain_id=DEFAULT_DOMAIN_ID)
self.default_domain_project['id'] = self.default_domain_project_id
self.resource_api.create_project(self.default_domain_project_id,
self.default_domain_project)
- self.default_domain_user = self.new_user_ref(
+ self.default_domain_user = unit.create_user(
+ self.identity_api,
domain_id=DEFAULT_DOMAIN_ID)
- password = self.default_domain_user['password']
- self.default_domain_user = (
- self.identity_api.create_user(self.default_domain_user))
- self.default_domain_user['password'] = password
self.default_domain_user_id = self.default_domain_user['id']
# create & grant policy.json's default role for admin_required
- self.role_id = uuid.uuid4().hex
- self.role = self.new_role_ref()
- self.role['id'] = self.role_id
- self.role['name'] = 'admin'
+ self.role = unit.new_role_ref(name='admin')
+ self.role_id = self.role['id']
self.role_api.create_role(self.role_id, self.role)
self.assignment_api.add_role_to_user_and_project(
self.user_id, self.project_id, self.role_id)
@@ -240,81 +346,35 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
self.default_domain_user_id, self.project_id,
self.role_id)
- self.region_id = uuid.uuid4().hex
- self.region = self.new_region_ref()
- self.region['id'] = self.region_id
- self.catalog_api.create_region(
- self.region.copy())
-
- self.service_id = uuid.uuid4().hex
- self.service = self.new_service_ref()
- self.service['id'] = self.service_id
- self.catalog_api.create_service(
- self.service_id,
- self.service.copy())
-
- self.endpoint_id = uuid.uuid4().hex
- self.endpoint = self.new_endpoint_ref(service_id=self.service_id)
- self.endpoint['id'] = self.endpoint_id
- self.endpoint['region_id'] = self.region['id']
- self.catalog_api.create_endpoint(
- self.endpoint_id,
- self.endpoint.copy())
- # The server adds 'enabled' and defaults to True.
- self.endpoint['enabled'] = True
-
- def new_ref(self):
- """Populates a ref with attributes common to some API entities."""
- return unit.new_ref()
-
- def new_region_ref(self):
- return unit.new_region_ref()
-
- def new_service_ref(self):
- return unit.new_service_ref()
-
- def new_endpoint_ref(self, service_id, interface='public', **kwargs):
- return unit.new_endpoint_ref(
- service_id, interface=interface, default_region_id=self.region_id,
- **kwargs)
-
- def new_domain_ref(self):
- return unit.new_domain_ref()
-
- def new_project_ref(self, domain_id=None, parent_id=None, is_domain=False):
- return unit.new_project_ref(domain_id=domain_id, parent_id=parent_id,
- is_domain=is_domain)
-
- def new_user_ref(self, domain_id, project_id=None):
- return unit.new_user_ref(domain_id, project_id=project_id)
-
- def new_group_ref(self, domain_id):
- return unit.new_group_ref(domain_id)
-
- def new_credential_ref(self, user_id, project_id=None, cred_type=None):
- return unit.new_credential_ref(user_id, project_id=project_id,
- cred_type=cred_type)
+ # Create "req_admin" user for simulating a real user instead of the
+ # admin_token_auth middleware
+ self.user_reqadmin = unit.create_user(self.identity_api,
+ DEFAULT_DOMAIN_ID)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user_reqadmin['id'],
+ self.default_domain_project_id,
+ self.role_id)
- def new_role_ref(self):
- return unit.new_role_ref()
+ self.region = unit.new_region_ref()
+ self.region_id = self.region['id']
+ self.catalog_api.create_region(self.region)
- def new_policy_ref(self):
- return unit.new_policy_ref()
+ self.service = unit.new_service_ref()
+ self.service_id = self.service['id']
+ self.catalog_api.create_service(self.service_id, self.service.copy())
- def new_trust_ref(self, trustor_user_id, trustee_user_id, project_id=None,
- impersonation=None, expires=None, role_ids=None,
- role_names=None, remaining_uses=None,
- allow_redelegation=False):
- return unit.new_trust_ref(
- trustor_user_id, trustee_user_id, project_id=project_id,
- impersonation=impersonation, expires=expires, role_ids=role_ids,
- role_names=role_names, remaining_uses=remaining_uses,
- allow_redelegation=allow_redelegation)
+ self.endpoint = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id)
+ self.endpoint_id = self.endpoint['id']
+ self.catalog_api.create_endpoint(self.endpoint_id,
+ self.endpoint.copy())
+ # The server adds 'enabled' and defaults to True.
+ self.endpoint['enabled'] = True
def create_new_default_project_for_user(self, user_id, domain_id,
enable_project=True):
- ref = self.new_project_ref(domain_id=domain_id)
- ref['enabled'] = enable_project
+ ref = unit.new_project_ref(domain_id=domain_id, enabled=enable_project)
r = self.post('/projects', body={'project': ref})
project = self.assertValidProjectResponse(r, ref)
# set the user's preferred project
@@ -326,6 +386,34 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
return project
+ def get_admin_token(self):
+ """Convenience method so that we can test authenticated requests."""
+ r = self.admin_request(
+ method='POST',
+ path='/v3/auth/tokens',
+ body={
+ 'auth': {
+ 'identity': {
+ 'methods': ['password'],
+ 'password': {
+ 'user': {
+ 'name': self.user_reqadmin['name'],
+ 'password': self.user_reqadmin['password'],
+ 'domain': {
+ 'id': self.user_reqadmin['domain_id']
+ }
+ }
+ }
+ },
+ 'scope': {
+ 'project': {
+ 'id': self.default_domain_project_id,
+ }
+ }
+ }
+ })
+ return r.headers.get('X-Subject-Token')
+
def get_unscoped_token(self):
"""Convenience method so that we can test authenticated requests."""
r = self.admin_request(
@@ -407,11 +495,10 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
def get_requested_token(self, auth):
"""Request the specific token we want."""
-
- r = self.v3_authenticate_token(auth)
+ r = self.v3_create_token(auth)
return r.headers.get('X-Subject-Token')
- def v3_authenticate_token(self, auth, expected_status=201):
+ def v3_create_token(self, auth, expected_status=http_client.CREATED):
return self.admin_request(method='POST',
path='/v3/auth/tokens',
body=auth,
@@ -440,42 +527,31 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
return self.admin_request(path=path, token=token, **kwargs)
- def get(self, path, **kwargs):
- r = self.v3_request(method='GET', path=path, **kwargs)
- if 'expected_status' not in kwargs:
- self.assertResponseStatus(r, 200)
- return r
+ def get(self, path, expected_status=http_client.OK, **kwargs):
+ return self.v3_request(path, method='GET',
+ expected_status=expected_status, **kwargs)
- def head(self, path, **kwargs):
- r = self.v3_request(method='HEAD', path=path, **kwargs)
- if 'expected_status' not in kwargs:
- self.assertResponseStatus(r, 204)
- self.assertEqual('', r.body)
+ def head(self, path, expected_status=http_client.NO_CONTENT, **kwargs):
+ r = self.v3_request(path, method='HEAD',
+ expected_status=expected_status, **kwargs)
+ self.assertEqual(b'', r.body)
return r
- def post(self, path, **kwargs):
- r = self.v3_request(method='POST', path=path, **kwargs)
- if 'expected_status' not in kwargs:
- self.assertResponseStatus(r, 201)
- return r
+ def post(self, path, expected_status=http_client.CREATED, **kwargs):
+ return self.v3_request(path, method='POST',
+ expected_status=expected_status, **kwargs)
- def put(self, path, **kwargs):
- r = self.v3_request(method='PUT', path=path, **kwargs)
- if 'expected_status' not in kwargs:
- self.assertResponseStatus(r, 204)
- return r
+ def put(self, path, expected_status=http_client.NO_CONTENT, **kwargs):
+ return self.v3_request(path, method='PUT',
+ expected_status=expected_status, **kwargs)
- def patch(self, path, **kwargs):
- r = self.v3_request(method='PATCH', path=path, **kwargs)
- if 'expected_status' not in kwargs:
- self.assertResponseStatus(r, 200)
- return r
+ def patch(self, path, expected_status=http_client.OK, **kwargs):
+ return self.v3_request(path, method='PATCH',
+ expected_status=expected_status, **kwargs)
- def delete(self, path, **kwargs):
- r = self.v3_request(method='DELETE', path=path, **kwargs)
- if 'expected_status' not in kwargs:
- self.assertResponseStatus(r, 204)
- return r
+ def delete(self, path, expected_status=http_client.NO_CONTENT, **kwargs):
+ return self.v3_request(path, method='DELETE',
+ expected_status=expected_status, **kwargs)
def assertValidErrorResponse(self, r):
resp = r.result
@@ -582,7 +658,6 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
except Exception:
msg = '%s is not a valid ISO 8601 extended format date time.' % dt
raise AssertionError(msg)
- self.assertIsInstance(dt, datetime.datetime)
def assertValidTokenResponse(self, r, user=None):
self.assertTrue(r.headers.get('X-Subject-Token'))
@@ -611,11 +686,10 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
def assertValidUnscopedTokenResponse(self, r, *args, **kwargs):
token = self.assertValidTokenResponse(r, *args, **kwargs)
-
- self.assertNotIn('roles', token)
- self.assertNotIn('catalog', token)
- self.assertNotIn('project', token)
- self.assertNotIn('domain', token)
+ validator_object = validators.SchemaValidator(
+ self.generate_token_schema()
+ )
+ validator_object.validate(token)
return token
@@ -623,6 +697,7 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
require_catalog = kwargs.pop('require_catalog', True)
endpoint_filter = kwargs.pop('endpoint_filter', False)
ep_filter_assoc = kwargs.pop('ep_filter_assoc', 0)
+ is_admin_project = kwargs.pop('is_admin_project', False)
token = self.assertValidTokenResponse(r, *args, **kwargs)
if require_catalog:
@@ -650,40 +725,66 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
self.assertIn('id', role)
self.assertIn('name', role)
+ if is_admin_project:
+ # NOTE(samueldmq): We want to explicitly test for boolean
+ self.assertIs(True, token['is_admin_project'])
+ else:
+ self.assertNotIn('is_admin_project', token)
+
return token
def assertValidProjectScopedTokenResponse(self, r, *args, **kwargs):
token = self.assertValidScopedTokenResponse(r, *args, **kwargs)
- self.assertIn('project', token)
- self.assertIn('id', token['project'])
- self.assertIn('name', token['project'])
- self.assertIn('domain', token['project'])
- self.assertIn('id', token['project']['domain'])
- self.assertIn('name', token['project']['domain'])
+ project_scoped_token_schema = self.generate_token_schema(
+ project_scoped=True)
+
+ if token.get('OS-TRUST:trust'):
+ trust_properties = {
+ 'OS-TRUST:trust': {
+ 'type': ['object'],
+ 'required': ['id', 'impersonation', 'trustor_user',
+ 'trustee_user'],
+ 'properties': {
+ 'id': {'type': 'string'},
+ 'impersonation': {'type': 'boolean'},
+ 'trustor_user': {
+ 'type': 'object',
+ 'required': ['id'],
+ 'properties': {
+ 'id': {'type': 'string'}
+ },
+ 'additionalProperties': False
+ },
+ 'trustee_user': {
+ 'type': 'object',
+ 'required': ['id'],
+ 'properties': {
+ 'id': {'type': 'string'}
+ },
+ 'additionalProperties': False
+ }
+ },
+ 'additionalProperties': False
+ }
+ }
+ project_scoped_token_schema['properties'].update(trust_properties)
+
+ validator_object = validators.SchemaValidator(
+ project_scoped_token_schema)
+ validator_object.validate(token)
self.assertEqual(self.role_id, token['roles'][0]['id'])
return token
- def assertValidProjectTrustScopedTokenResponse(self, r, *args, **kwargs):
- token = self.assertValidProjectScopedTokenResponse(r, *args, **kwargs)
-
- trust = token.get('OS-TRUST:trust')
- self.assertIsNotNone(trust)
- self.assertIsNotNone(trust.get('id'))
- self.assertIsInstance(trust.get('impersonation'), bool)
- self.assertIsNotNone(trust.get('trustor_user'))
- self.assertIsNotNone(trust.get('trustee_user'))
- self.assertIsNotNone(trust['trustor_user'].get('id'))
- self.assertIsNotNone(trust['trustee_user'].get('id'))
-
def assertValidDomainScopedTokenResponse(self, r, *args, **kwargs):
token = self.assertValidScopedTokenResponse(r, *args, **kwargs)
- self.assertIn('domain', token)
- self.assertIn('id', token['domain'])
- self.assertIn('name', token['domain'])
+ validator_object = validators.SchemaValidator(
+ self.generate_token_schema(domain_scoped=True)
+ )
+ validator_object.validate(token)
return token
@@ -876,7 +977,6 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
**kwargs)
def assertValidProject(self, entity, ref=None):
- self.assertIsNotNone(entity.get('domain_id'))
if ref:
self.assertEqual(ref['domain_id'], entity['domain_id'])
return entity
@@ -888,6 +988,7 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
resp,
'users',
self.assertValidUser,
+ keys_to_check=['name', 'enabled'],
*args,
**kwargs)
@@ -896,6 +997,7 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
resp,
'user',
self.assertValidUser,
+ keys_to_check=['name', 'enabled'],
*args,
**kwargs)
@@ -920,6 +1022,7 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
resp,
'groups',
self.assertValidGroup,
+ keys_to_check=['name', 'description', 'domain_id'],
*args,
**kwargs)
@@ -928,6 +1031,7 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
resp,
'group',
self.assertValidGroup,
+ keys_to_check=['name', 'description', 'domain_id'],
*args,
**kwargs)
@@ -979,6 +1083,21 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
*args,
**kwargs)
+ def assertRoleInListResponse(self, resp, ref, expected=1):
+ found_count = 0
+ for entity in resp.result.get('roles'):
+ try:
+ self.assertValidRole(entity, ref=ref)
+ except Exception:
+ # It doesn't match, so let's go onto the next one
+ pass
+ else:
+ found_count += 1
+ self.assertEqual(expected, found_count)
+
+ def assertRoleNotInListResponse(self, resp, ref):
+ self.assertRoleInListResponse(resp, ref=ref, expected=0)
+
def assertValidRoleResponse(self, resp, *args, **kwargs):
return self.assertValidResponse(
resp,
@@ -992,6 +1111,7 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
self.assertIsNotNone(entity.get('name'))
if ref:
self.assertEqual(ref['name'], entity['name'])
+ self.assertEqual(ref['domain_id'], entity['domain_id'])
return entity
# role assignment validation
@@ -1161,6 +1281,27 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
return entity
+ # Service providers (federation)
+
+ def assertValidServiceProvider(self, entity, ref=None, *args, **kwargs):
+
+ attributes = frozenset(['auth_url', 'id', 'enabled', 'description',
+ 'links', 'relay_state_prefix', 'sp_url'])
+ for attribute in attributes:
+ self.assertIsNotNone(entity.get(attribute))
+
+ def assertValidServiceProviderListResponse(self, resp, *args, **kwargs):
+ if kwargs.get('keys_to_check') is None:
+ kwargs['keys_to_check'] = ['auth_url', 'id', 'enabled',
+ 'description', 'relay_state_prefix',
+ 'sp_url']
+ return self.assertValidListResponse(
+ resp,
+ 'service_providers',
+ self.assertValidServiceProvider,
+ *args,
+ **kwargs)
+
def build_external_auth_request(self, remote_user,
remote_domain=None, auth_data=None,
kerberos=False):
@@ -1182,24 +1323,81 @@ class VersionTestCase(RestfulTestCase):
pass
+# NOTE(morganfainberg): To be removed when admin_token_auth is removed. This
+# has been split out to allow testing admin_token auth without enabling it
+# for other tests.
+class AuthContextMiddlewareAdminTokenTestCase(RestfulTestCase):
+ EXTENSION_TO_ADD = 'admin_token_auth'
+
+ def config_overrides(self):
+ super(AuthContextMiddlewareAdminTokenTestCase, self).config_overrides()
+ self.config_fixture.config(
+ admin_token='ADMIN')
+
+ # NOTE(morganfainberg): This is knowingly copied from below for simplicity
+ # during the deprecation cycle.
+ def _middleware_request(self, token, extra_environ=None):
+
+ def application(environ, start_response):
+ body = b'body'
+ headers = [('Content-Type', 'text/html; charset=utf8'),
+ ('Content-Length', str(len(body)))]
+ start_response('200 OK', headers)
+ return [body]
+
+ app = webtest.TestApp(middleware.AuthContextMiddleware(application),
+ extra_environ=extra_environ)
+ resp = app.get('/', headers={middleware.AUTH_TOKEN_HEADER: token})
+ self.assertEqual('body', resp.text) # just to make sure it worked
+ return resp.request
+
+ def test_admin_auth_context(self):
+ # test to make sure AuthContextMiddleware does not attempt to build the
+ # auth context if the admin_token middleware indicates it's admin
+ # already.
+ token_id = uuid.uuid4().hex # token doesn't matter.
+ # the admin_token middleware sets is_admin in the context.
+ extra_environ = {middleware.CONTEXT_ENV: {'is_admin': True}}
+ req = self._middleware_request(token_id, extra_environ)
+ auth_context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
+ self.assertDictEqual({}, auth_context)
+
+ @mock.patch.object(middleware_auth.versionutils,
+ 'report_deprecated_feature')
+ def test_admin_token_auth_context_deprecated(self, mock_report_deprecated):
+ # For backwards compatibility AuthContextMiddleware will check that the
+ # admin token (as configured in the CONF file) is present and not
+ # attempt to build the auth context. This is deprecated.
+ req = self._middleware_request('ADMIN')
+ auth_context = req.environ.get(authorization.AUTH_CONTEXT_ENV)
+ self.assertDictEqual({}, auth_context)
+ self.assertEqual(1, mock_report_deprecated.call_count)
+
+
# NOTE(gyee): test AuthContextMiddleware here instead of test_middleware.py
# because we need the token
class AuthContextMiddlewareTestCase(RestfulTestCase):
- def _mock_request_object(self, token_id):
- class fake_req(object):
- headers = {middleware.AUTH_TOKEN_HEADER: token_id}
- environ = {}
+ def _middleware_request(self, token, extra_environ=None):
+
+ def application(environ, start_response):
+ body = b'body'
+ headers = [('Content-Type', 'text/html; charset=utf8'),
+ ('Content-Length', str(len(body)))]
+ start_response('200 OK', headers)
+ return [body]
- return fake_req()
+ app = webtest.TestApp(middleware.AuthContextMiddleware(application),
+ extra_environ=extra_environ)
+ resp = app.get('/', headers={middleware.AUTH_TOKEN_HEADER: token})
+ self.assertEqual(b'body', resp.body) # just to make sure it worked
+ return resp.request
def test_auth_context_build_by_middleware(self):
# test to make sure AuthContextMiddleware successful build the auth
# context from the incoming auth token
admin_token = self.get_scoped_token()
- req = self._mock_request_object(admin_token)
- application = None
- middleware.AuthContextMiddleware(application).process_request(req)
+ req = self._middleware_request(admin_token)
self.assertEqual(
self.user['id'],
req.environ.get(authorization.AUTH_CONTEXT_ENV)['user_id'])
@@ -1208,28 +1406,16 @@ class AuthContextMiddlewareTestCase(RestfulTestCase):
overridden_context = 'OVERRIDDEN_CONTEXT'
# this token should not be used
token = uuid.uuid4().hex
- req = self._mock_request_object(token)
- req.environ[authorization.AUTH_CONTEXT_ENV] = overridden_context
- application = None
- middleware.AuthContextMiddleware(application).process_request(req)
+
+ extra_environ = {authorization.AUTH_CONTEXT_ENV: overridden_context}
+ req = self._middleware_request(token, extra_environ=extra_environ)
# make sure overridden context take precedence
self.assertEqual(overridden_context,
req.environ.get(authorization.AUTH_CONTEXT_ENV))
- def test_admin_token_auth_context(self):
- # test to make sure AuthContextMiddleware does not attempt to build
- # auth context if the incoming auth token is the special admin token
- req = self._mock_request_object(CONF.admin_token)
- application = None
- middleware.AuthContextMiddleware(application).process_request(req)
- self.assertDictEqual(req.environ.get(authorization.AUTH_CONTEXT_ENV),
- {})
-
def test_unscoped_token_auth_context(self):
unscoped_token = self.get_unscoped_token()
- req = self._mock_request_object(unscoped_token)
- application = None
- middleware.AuthContextMiddleware(application).process_request(req)
+ req = self._middleware_request(unscoped_token)
for key in ['project_id', 'domain_id', 'domain_name']:
self.assertNotIn(
key,
@@ -1237,9 +1423,7 @@ class AuthContextMiddlewareTestCase(RestfulTestCase):
def test_project_scoped_token_auth_context(self):
project_scoped_token = self.get_scoped_token()
- req = self._mock_request_object(project_scoped_token)
- application = None
- middleware.AuthContextMiddleware(application).process_request(req)
+ req = self._middleware_request(project_scoped_token)
self.assertEqual(
self.project['id'],
req.environ.get(authorization.AUTH_CONTEXT_ENV)['project_id'])
@@ -1251,9 +1435,7 @@ class AuthContextMiddlewareTestCase(RestfulTestCase):
self.put(path=path)
domain_scoped_token = self.get_domain_scoped_token()
- req = self._mock_request_object(domain_scoped_token)
- application = None
- middleware.AuthContextMiddleware(application).process_request(req)
+ req = self._middleware_request(domain_scoped_token)
self.assertEqual(
self.domain['id'],
req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_id'])
@@ -1261,6 +1443,30 @@ class AuthContextMiddlewareTestCase(RestfulTestCase):
self.domain['name'],
req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_name'])
+ def test_oslo_context(self):
+ # After AuthContextMiddleware runs, an
+ # oslo_context.context.RequestContext was created so that its fields
+ # can be logged. This test validates that the RequestContext was
+ # created and the fields are set as expected.
+
+ # Use a scoped token so more fields can be set.
+ token = self.get_scoped_token()
+
+ # oslo_middleware RequestId middleware sets openstack.request_id.
+ request_id = uuid.uuid4().hex
+ environ = {'openstack.request_id': request_id}
+ self._middleware_request(token, extra_environ=environ)
+
+ req_context = oslo_context.context.get_current()
+ self.assertEqual(request_id, req_context.request_id)
+ self.assertEqual(token, req_context.auth_token)
+ self.assertEqual(self.user['id'], req_context.user)
+ self.assertEqual(self.project['id'], req_context.tenant)
+ self.assertIsNone(req_context.domain)
+ self.assertEqual(self.user['domain_id'], req_context.user_domain)
+ self.assertEqual(self.project['domain_id'], req_context.project_domain)
+ self.assertFalse(req_context.is_admin)
+
class JsonHomeTestMixin(object):
"""JSON Home test
@@ -1273,6 +1479,7 @@ class JsonHomeTestMixin(object):
data must be in the response.
"""
+
def test_get_json_home(self):
resp = self.get('/', convert=False,
headers={'Accept': 'application/json-home'})
@@ -1295,7 +1502,6 @@ class AssignmentTestMixin(object):
Available filters are: domain_id, project_id, user_id, group_id,
role_id and inherited_to_projects.
"""
-
query_params = '?effective' if effective else ''
for k, v in filters.items():
@@ -1320,7 +1526,6 @@ class AssignmentTestMixin(object):
Provided attributes are expected to contain: domain_id or project_id,
user_id or group_id, role_id and, optionally, inherited_to_projects.
"""
-
if attribs.get('domain_id'):
link = '/domains/' + attribs['domain_id']
else:
@@ -1338,13 +1543,13 @@ class AssignmentTestMixin(object):
return link
- def build_role_assignment_entity(self, link=None, **attribs):
+ def build_role_assignment_entity(
+ self, link=None, prior_role_link=None, **attribs):
"""Build and return a role assignment entity with provided attributes.
Provided attributes are expected to contain: domain_id or project_id,
user_id or group_id, role_id and, optionally, inherited_to_projects.
"""
-
entity = {'links': {'assignment': (
link or self.build_role_assignment_link(**attribs))}}
@@ -1368,4 +1573,68 @@ class AssignmentTestMixin(object):
if attribs.get('inherited_to_projects'):
entity['scope']['OS-INHERIT:inherited_to'] = 'projects'
+ if prior_role_link:
+ entity['links']['prior_role'] = prior_role_link
+
+ return entity
+
+ def build_role_assignment_entity_include_names(self,
+ domain_ref=None,
+ role_ref=None,
+ group_ref=None,
+ user_ref=None,
+ project_ref=None,
+ inherited_assignment=None):
+ """Build and return a role assignment entity with provided attributes.
+
+ The expected attributes are: domain_ref or project_ref,
+ user_ref or group_ref, role_ref and, optionally, inherited_to_projects.
+ """
+ entity = {'links': {}}
+ attributes_for_links = {}
+ if project_ref:
+ dmn_name = self.resource_api.get_domain(
+ project_ref['domain_id'])['name']
+
+ entity['scope'] = {'project': {
+ 'id': project_ref['id'],
+ 'name': project_ref['name'],
+ 'domain': {
+ 'id': project_ref['domain_id'],
+ 'name': dmn_name}}}
+ attributes_for_links['project_id'] = project_ref['id']
+ else:
+ entity['scope'] = {'domain': {'id': domain_ref['id'],
+ 'name': domain_ref['name']}}
+ attributes_for_links['domain_id'] = domain_ref['id']
+ if user_ref:
+ dmn_name = self.resource_api.get_domain(
+ user_ref['domain_id'])['name']
+ entity['user'] = {'id': user_ref['id'],
+ 'name': user_ref['name'],
+ 'domain': {'id': user_ref['domain_id'],
+ 'name': dmn_name}}
+ attributes_for_links['user_id'] = user_ref['id']
+ else:
+ dmn_name = self.resource_api.get_domain(
+ group_ref['domain_id'])['name']
+ entity['group'] = {'id': group_ref['id'],
+ 'name': group_ref['name'],
+ 'domain': {
+ 'id': group_ref['domain_id'],
+ 'name': dmn_name}}
+ attributes_for_links['group_id'] = group_ref['id']
+
+ if role_ref:
+ entity['role'] = {'id': role_ref['id'],
+ 'name': role_ref['name']}
+ attributes_for_links['role_id'] = role_ref['id']
+
+ if inherited_assignment:
+ entity['scope']['OS-INHERIT:inherited_to'] = 'projects'
+ attributes_for_links['inherited_to_projects'] = True
+
+ entity['links']['assignment'] = self.build_role_assignment_link(
+ **attributes_for_links)
+
return entity
diff --git a/keystone-moon/keystone/tests/unit/test_v3_assignment.py b/keystone-moon/keystone/tests/unit/test_v3_assignment.py
index 6b15b1c3..86fb9f74 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_assignment.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_assignment.py
@@ -16,12 +16,10 @@ import uuid
from oslo_config import cfg
from six.moves import http_client
from six.moves import range
+from testtools import matchers
-from keystone.common import controller
-from keystone import exception
from keystone.tests import unit
from keystone.tests.unit import test_v3
-from keystone.tests.unit import utils
CONF = cfg.CONF
@@ -29,1042 +27,20 @@ CONF = cfg.CONF
class AssignmentTestCase(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin):
- """Test domains, projects, roles and role assignments."""
+ """Test roles and role assignments."""
def setUp(self):
super(AssignmentTestCase, self).setUp()
- self.group = self.new_group_ref(
- domain_id=self.domain_id)
+ self.group = unit.new_group_ref(domain_id=self.domain_id)
self.group = self.identity_api.create_group(self.group)
self.group_id = self.group['id']
- self.credential_id = uuid.uuid4().hex
- self.credential = self.new_credential_ref(
- user_id=self.user['id'],
- project_id=self.project_id)
- self.credential['id'] = self.credential_id
- self.credential_api.create_credential(
- self.credential_id,
- self.credential)
-
- # Domain CRUD tests
-
- def test_create_domain(self):
- """Call ``POST /domains``."""
- ref = self.new_domain_ref()
- r = self.post(
- '/domains',
- body={'domain': ref})
- return self.assertValidDomainResponse(r, ref)
-
- def test_create_domain_case_sensitivity(self):
- """Call `POST /domains`` twice with upper() and lower() cased name."""
- ref = self.new_domain_ref()
-
- # ensure the name is lowercase
- ref['name'] = ref['name'].lower()
- r = self.post(
- '/domains',
- body={'domain': ref})
- self.assertValidDomainResponse(r, ref)
-
- # ensure the name is uppercase
- ref['name'] = ref['name'].upper()
- r = self.post(
- '/domains',
- body={'domain': ref})
- self.assertValidDomainResponse(r, ref)
-
- def test_create_domain_bad_request(self):
- """Call ``POST /domains``."""
- self.post('/domains', body={'domain': {}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_list_domains(self):
- """Call ``GET /domains``."""
- resource_url = '/domains'
- r = self.get(resource_url)
- self.assertValidDomainListResponse(r, ref=self.domain,
- resource_url=resource_url)
-
- def test_get_domain(self):
- """Call ``GET /domains/{domain_id}``."""
- r = self.get('/domains/%(domain_id)s' % {
- 'domain_id': self.domain_id})
- self.assertValidDomainResponse(r, self.domain)
-
- def test_update_domain(self):
- """Call ``PATCH /domains/{domain_id}``."""
- ref = self.new_domain_ref()
- del ref['id']
- r = self.patch('/domains/%(domain_id)s' % {
- 'domain_id': self.domain_id},
- body={'domain': ref})
- self.assertValidDomainResponse(r, ref)
-
- def test_disable_domain(self):
- """Call ``PATCH /domains/{domain_id}`` (set enabled=False)."""
- # Create a 2nd set of entities in a 2nd domain
- self.domain2 = self.new_domain_ref()
- self.resource_api.create_domain(self.domain2['id'], self.domain2)
-
- self.project2 = self.new_project_ref(
- domain_id=self.domain2['id'])
- self.resource_api.create_project(self.project2['id'], self.project2)
-
- self.user2 = self.new_user_ref(
- domain_id=self.domain2['id'],
- project_id=self.project2['id'])
- password = self.user2['password']
- self.user2 = self.identity_api.create_user(self.user2)
- self.user2['password'] = password
-
- self.assignment_api.add_user_to_project(self.project2['id'],
- self.user2['id'])
-
- # First check a user in that domain can authenticate. The v2 user
- # cannot authenticate because they exist outside the default domain.
- body = {
- 'auth': {
- 'passwordCredentials': {
- 'userId': self.user2['id'],
- 'password': self.user2['password']
- },
- 'tenantId': self.project2['id']
- }
- }
- self.admin_request(
- path='/v2.0/tokens', method='POST', body=body,
- expected_status=http_client.UNAUTHORIZED)
-
- auth_data = self.build_authentication_request(
- user_id=self.user2['id'],
- password=self.user2['password'],
- project_id=self.project2['id'])
- self.v3_authenticate_token(auth_data)
-
- # Now disable the domain
- self.domain2['enabled'] = False
- r = self.patch('/domains/%(domain_id)s' % {
- 'domain_id': self.domain2['id']},
- body={'domain': {'enabled': False}})
- self.assertValidDomainResponse(r, self.domain2)
-
- # Make sure the user can no longer authenticate, via
- # either API
- body = {
- 'auth': {
- 'passwordCredentials': {
- 'userId': self.user2['id'],
- 'password': self.user2['password']
- },
- 'tenantId': self.project2['id']
- }
- }
- self.admin_request(
- path='/v2.0/tokens', method='POST', body=body,
- expected_status=http_client.UNAUTHORIZED)
-
- # Try looking up in v3 by name and id
- auth_data = self.build_authentication_request(
- user_id=self.user2['id'],
- password=self.user2['password'],
- project_id=self.project2['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- auth_data = self.build_authentication_request(
- username=self.user2['name'],
- user_domain_id=self.domain2['id'],
- password=self.user2['password'],
- project_id=self.project2['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_delete_enabled_domain_fails(self):
- """Call ``DELETE /domains/{domain_id}`` (when domain enabled)."""
-
- # Try deleting an enabled domain, which should fail
- self.delete('/domains/%(domain_id)s' % {
- 'domain_id': self.domain['id']},
- expected_status=exception.ForbiddenAction.code)
-
- def test_delete_domain(self):
- """Call ``DELETE /domains/{domain_id}``.
-
- The sample data set up already has a user, group, project
- and credential that is part of self.domain. Since the user
- we will authenticate with is in this domain, we create a
- another set of entities in a second domain. Deleting this
- second domain should delete all these new entities. In addition,
- all the entities in the regular self.domain should be unaffected
- by the delete.
-
- Test Plan:
-
- - Create domain2 and a 2nd set of entities
- - Disable domain2
- - Delete domain2
- - Check entities in domain2 have been deleted
- - Check entities in self.domain are unaffected
-
- """
-
- # Create a 2nd set of entities in a 2nd domain
- self.domain2 = self.new_domain_ref()
- self.resource_api.create_domain(self.domain2['id'], self.domain2)
-
- self.project2 = self.new_project_ref(
- domain_id=self.domain2['id'])
- self.resource_api.create_project(self.project2['id'], self.project2)
-
- self.user2 = self.new_user_ref(
- domain_id=self.domain2['id'],
- project_id=self.project2['id'])
- self.user2 = self.identity_api.create_user(self.user2)
-
- self.group2 = self.new_group_ref(
- domain_id=self.domain2['id'])
- self.group2 = self.identity_api.create_group(self.group2)
-
- self.credential2 = self.new_credential_ref(
- user_id=self.user2['id'],
- project_id=self.project2['id'])
- self.credential_api.create_credential(
- self.credential2['id'],
- self.credential2)
-
- # Now disable the new domain and delete it
- self.domain2['enabled'] = False
- r = self.patch('/domains/%(domain_id)s' % {
- 'domain_id': self.domain2['id']},
- body={'domain': {'enabled': False}})
- self.assertValidDomainResponse(r, self.domain2)
- self.delete('/domains/%(domain_id)s' % {
- 'domain_id': self.domain2['id']})
-
- # Check all the domain2 relevant entities are gone
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- self.domain2['id'])
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- self.project2['id'])
- self.assertRaises(exception.GroupNotFound,
- self.identity_api.get_group,
- self.group2['id'])
- self.assertRaises(exception.UserNotFound,
- self.identity_api.get_user,
- self.user2['id'])
- self.assertRaises(exception.CredentialNotFound,
- self.credential_api.get_credential,
- self.credential2['id'])
-
- # ...and that all self.domain entities are still here
- r = self.resource_api.get_domain(self.domain['id'])
- self.assertDictEqual(r, self.domain)
- r = self.resource_api.get_project(self.project['id'])
- self.assertDictEqual(r, self.project)
- r = self.identity_api.get_group(self.group['id'])
- self.assertDictEqual(r, self.group)
- r = self.identity_api.get_user(self.user['id'])
- self.user.pop('password')
- self.assertDictEqual(r, self.user)
- r = self.credential_api.get_credential(self.credential['id'])
- self.assertDictEqual(r, self.credential)
-
- def test_delete_default_domain_fails(self):
- # Attempting to delete the default domain results in 403 Forbidden.
-
- # Need to disable it first.
- self.patch('/domains/%(domain_id)s' % {
- 'domain_id': CONF.identity.default_domain_id},
- body={'domain': {'enabled': False}})
-
- self.delete('/domains/%(domain_id)s' % {
- 'domain_id': CONF.identity.default_domain_id},
- expected_status=exception.ForbiddenAction.code)
-
- def test_delete_new_default_domain_fails(self):
- # If change the default domain ID, deleting the new default domain
- # results in a 403 Forbidden.
-
- # Create a new domain that's not the default
- new_domain = self.new_domain_ref()
- new_domain_id = new_domain['id']
- self.resource_api.create_domain(new_domain_id, new_domain)
-
- # Disable the new domain so can delete it later.
- self.patch('/domains/%(domain_id)s' % {
- 'domain_id': new_domain_id},
- body={'domain': {'enabled': False}})
-
- # Change the default domain
- self.config_fixture.config(group='identity',
- default_domain_id=new_domain_id)
-
- # Attempt to delete the new domain
-
- self.delete('/domains/%(domain_id)s' % {'domain_id': new_domain_id},
- expected_status=exception.ForbiddenAction.code)
-
- def test_delete_old_default_domain(self):
- # If change the default domain ID, deleting the old default domain
- # works.
-
- # Create a new domain that's not the default
- new_domain = self.new_domain_ref()
- new_domain_id = new_domain['id']
- self.resource_api.create_domain(new_domain_id, new_domain)
-
- old_default_domain_id = CONF.identity.default_domain_id
-
- # Disable the default domain so we can delete it later.
- self.patch('/domains/%(domain_id)s' % {
- 'domain_id': old_default_domain_id},
- body={'domain': {'enabled': False}})
-
- # Change the default domain
- self.config_fixture.config(group='identity',
- default_domain_id=new_domain_id)
-
- # Delete the old default domain
-
- self.delete(
- '/domains/%(domain_id)s' % {'domain_id': old_default_domain_id})
-
- def test_token_revoked_once_domain_disabled(self):
- """Test token from a disabled domain has been invalidated.
-
- Test that a token that was valid for an enabled domain
- becomes invalid once that domain is disabled.
-
- """
-
- self.domain = self.new_domain_ref()
- self.resource_api.create_domain(self.domain['id'], self.domain)
-
- self.user2 = self.new_user_ref(domain_id=self.domain['id'])
- password = self.user2['password']
- self.user2 = self.identity_api.create_user(self.user2)
- self.user2['password'] = password
-
- # build a request body
- auth_body = self.build_authentication_request(
- user_id=self.user2['id'],
- password=self.user2['password'])
-
- # sends a request for the user's token
- token_resp = self.post('/auth/tokens', body=auth_body)
-
- subject_token = token_resp.headers.get('x-subject-token')
-
- # validates the returned token and it should be valid.
- self.head('/auth/tokens',
- headers={'x-subject-token': subject_token},
- expected_status=200)
-
- # now disable the domain
- self.domain['enabled'] = False
- url = "/domains/%(domain_id)s" % {'domain_id': self.domain['id']}
- self.patch(url,
- body={'domain': {'enabled': False}},
- expected_status=200)
-
- # validates the same token again and it should be 'not found'
- # as the domain has already been disabled.
- self.head('/auth/tokens',
- headers={'x-subject-token': subject_token},
- expected_status=http_client.NOT_FOUND)
-
- def test_delete_domain_hierarchy(self):
- """Call ``DELETE /domains/{domain_id}``."""
- domain = self.new_domain_ref()
- self.resource_api.create_domain(domain['id'], domain)
-
- root_project = self.new_project_ref(
- domain_id=domain['id'])
- self.resource_api.create_project(root_project['id'], root_project)
-
- leaf_project = self.new_project_ref(
- domain_id=domain['id'],
- parent_id=root_project['id'])
- self.resource_api.create_project(leaf_project['id'], leaf_project)
-
- # Need to disable it first.
- self.patch('/domains/%(domain_id)s' % {
- 'domain_id': domain['id']},
- body={'domain': {'enabled': False}})
-
- self.delete(
- '/domains/%(domain_id)s' % {
- 'domain_id': domain['id']})
-
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.get_domain,
- domain['id'])
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- root_project['id'])
-
- self.assertRaises(exception.ProjectNotFound,
- self.resource_api.get_project,
- leaf_project['id'])
-
- def test_forbid_operations_on_federated_domain(self):
- """Make sure one cannot operate on federated domain.
-
- This includes operations like create, update, delete
- on domain identified by id and name where difference variations of
- id 'Federated' are used.
-
- """
- def create_domains():
- for variation in ('Federated', 'FEDERATED',
- 'federated', 'fEderated'):
- domain = self.new_domain_ref()
- domain['id'] = variation
- yield domain
-
- for domain in create_domains():
- self.assertRaises(
- AssertionError, self.resource_api.create_domain,
- domain['id'], domain)
- self.assertRaises(
- AssertionError, self.resource_api.update_domain,
- domain['id'], domain)
- self.assertRaises(
- exception.DomainNotFound, self.resource_api.delete_domain,
- domain['id'])
-
- # swap 'name' with 'id' and try again, expecting the request to
- # gracefully fail
- domain['id'], domain['name'] = domain['name'], domain['id']
- self.assertRaises(
- AssertionError, self.resource_api.create_domain,
- domain['id'], domain)
- self.assertRaises(
- AssertionError, self.resource_api.update_domain,
- domain['id'], domain)
- self.assertRaises(
- exception.DomainNotFound, self.resource_api.delete_domain,
- domain['id'])
-
- def test_forbid_operations_on_defined_federated_domain(self):
- """Make sure one cannot operate on a user-defined federated domain.
-
- This includes operations like create, update, delete.
-
- """
-
- non_default_name = 'beta_federated_domain'
- self.config_fixture.config(group='federation',
- federated_domain_name=non_default_name)
- domain = self.new_domain_ref()
- domain['name'] = non_default_name
- self.assertRaises(AssertionError,
- self.resource_api.create_domain,
- domain['id'], domain)
- self.assertRaises(exception.DomainNotFound,
- self.resource_api.delete_domain,
- domain['id'])
- self.assertRaises(AssertionError,
- self.resource_api.update_domain,
- domain['id'], domain)
-
- # Project CRUD tests
-
- def test_list_projects(self):
- """Call ``GET /projects``."""
- resource_url = '/projects'
- r = self.get(resource_url)
- self.assertValidProjectListResponse(r, ref=self.project,
- resource_url=resource_url)
-
- def test_create_project(self):
- """Call ``POST /projects``."""
- ref = self.new_project_ref(domain_id=self.domain_id)
- r = self.post(
- '/projects',
- body={'project': ref})
- self.assertValidProjectResponse(r, ref)
-
- def test_create_project_bad_request(self):
- """Call ``POST /projects``."""
- self.post('/projects', body={'project': {}},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_project_invalid_domain_id(self):
- """Call ``POST /projects``."""
- ref = self.new_project_ref(domain_id=uuid.uuid4().hex)
- self.post('/projects', body={'project': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_create_project_is_domain_not_allowed(self):
- """Call ``POST /projects``.
-
- Setting is_domain=True is not supported yet and should raise
- NotImplemented.
-
- """
- ref = self.new_project_ref(domain_id=self.domain_id, is_domain=True)
- self.post('/projects',
- body={'project': ref},
- expected_status=501)
-
- @utils.wip('waiting for projects acting as domains implementation')
- def test_create_project_without_parent_id_and_without_domain_id(self):
- """Call ``POST /projects``."""
-
- # Grant a domain role for the user
- collection_url = (
- '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
- 'domain_id': self.domain_id,
- 'user_id': self.user['id']})
- member_url = '%(collection_url)s/%(role_id)s' % {
- 'collection_url': collection_url,
- 'role_id': self.role_id}
- self.put(member_url)
-
- # Create an authentication request for a domain scoped token
- auth = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain_id)
-
- # Without domain_id and parent_id, the domain_id should be
- # normalized to the domain on the token, when using a domain
- # scoped token.
- ref = self.new_project_ref()
- r = self.post(
- '/projects',
- auth=auth,
- body={'project': ref})
- ref['domain_id'] = self.domain['id']
- self.assertValidProjectResponse(r, ref)
-
- @utils.wip('waiting for projects acting as domains implementation')
- def test_create_project_with_parent_id_and_no_domain_id(self):
- """Call ``POST /projects``."""
- # With only the parent_id, the domain_id should be
- # normalized to the parent's domain_id
- ref_child = self.new_project_ref(parent_id=self.project['id'])
-
- r = self.post(
- '/projects',
- body={'project': ref_child})
- self.assertEqual(r.result['project']['domain_id'],
- self.project['domain_id'])
- ref_child['domain_id'] = self.domain['id']
- self.assertValidProjectResponse(r, ref_child)
-
- def _create_projects_hierarchy(self, hierarchy_size=1):
- """Creates a single-branched project hierarchy with the specified size.
-
- :param hierarchy_size: the desired hierarchy size, default is 1 -
- a project with one child.
-
- :returns projects: a list of the projects in the created hierarchy.
-
- """
- new_ref = self.new_project_ref(domain_id=self.domain_id)
- resp = self.post('/projects', body={'project': new_ref})
-
- projects = [resp.result]
-
- for i in range(hierarchy_size):
- new_ref = self.new_project_ref(
- domain_id=self.domain_id,
- parent_id=projects[i]['project']['id'])
- resp = self.post('/projects',
- body={'project': new_ref})
- self.assertValidProjectResponse(resp, new_ref)
-
- projects.append(resp.result)
-
- return projects
-
- def test_list_projects_filtering_by_parent_id(self):
- """Call ``GET /projects?parent_id={project_id}``."""
- projects = self._create_projects_hierarchy(hierarchy_size=2)
-
- # Add another child to projects[1] - it will be projects[3]
- new_ref = self.new_project_ref(
- domain_id=self.domain_id,
- parent_id=projects[1]['project']['id'])
- resp = self.post('/projects',
- body={'project': new_ref})
- self.assertValidProjectResponse(resp, new_ref)
-
- projects.append(resp.result)
-
- # Query for projects[0] immediate children - it will
- # be only projects[1]
- r = self.get(
- '/projects?parent_id=%(project_id)s' % {
- 'project_id': projects[0]['project']['id']})
- self.assertValidProjectListResponse(r)
-
- projects_result = r.result['projects']
- expected_list = [projects[1]['project']]
-
- # projects[0] has projects[1] as child
- self.assertEqual(expected_list, projects_result)
-
- # Query for projects[1] immediate children - it will
- # be projects[2] and projects[3]
- r = self.get(
- '/projects?parent_id=%(project_id)s' % {
- 'project_id': projects[1]['project']['id']})
- self.assertValidProjectListResponse(r)
-
- projects_result = r.result['projects']
- expected_list = [projects[2]['project'], projects[3]['project']]
-
- # projects[1] has projects[2] and projects[3] as children
- self.assertEqual(expected_list, projects_result)
-
- # Query for projects[2] immediate children - it will be an empty list
- r = self.get(
- '/projects?parent_id=%(project_id)s' % {
- 'project_id': projects[2]['project']['id']})
- self.assertValidProjectListResponse(r)
-
- projects_result = r.result['projects']
- expected_list = []
-
- # projects[2] has no child, projects_result must be an empty list
- self.assertEqual(expected_list, projects_result)
-
- def test_create_hierarchical_project(self):
- """Call ``POST /projects``."""
- self._create_projects_hierarchy()
-
- def test_get_project(self):
- """Call ``GET /projects/{project_id}``."""
- r = self.get(
- '/projects/%(project_id)s' % {
- 'project_id': self.project_id})
- self.assertValidProjectResponse(r, self.project)
-
- def test_get_project_with_parents_as_list_with_invalid_id(self):
- """Call ``GET /projects/{project_id}?parents_as_list``."""
- self.get('/projects/%(project_id)s?parents_as_list' % {
- 'project_id': None}, expected_status=http_client.NOT_FOUND)
-
- self.get('/projects/%(project_id)s?parents_as_list' % {
- 'project_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_get_project_with_subtree_as_list_with_invalid_id(self):
- """Call ``GET /projects/{project_id}?subtree_as_list``."""
- self.get('/projects/%(project_id)s?subtree_as_list' % {
- 'project_id': None}, expected_status=http_client.NOT_FOUND)
-
- self.get('/projects/%(project_id)s?subtree_as_list' % {
- 'project_id': uuid.uuid4().hex},
- expected_status=http_client.NOT_FOUND)
-
- def test_get_project_with_parents_as_ids(self):
- """Call ``GET /projects/{project_id}?parents_as_ids``."""
- projects = self._create_projects_hierarchy(hierarchy_size=2)
-
- # Query for projects[2] parents_as_ids
- r = self.get(
- '/projects/%(project_id)s?parents_as_ids' % {
- 'project_id': projects[2]['project']['id']})
-
- self.assertValidProjectResponse(r, projects[2]['project'])
- parents_as_ids = r.result['project']['parents']
-
- # Assert parents_as_ids is a structured dictionary correctly
- # representing the hierarchy. The request was made using projects[2]
- # id, hence its parents should be projects[1] and projects[0]. It
- # should have the following structure:
- # {
- # projects[1]: {
- # projects[0]: None
- # }
- # }
- expected_dict = {
- projects[1]['project']['id']: {
- projects[0]['project']['id']: None
- }
- }
- self.assertDictEqual(expected_dict, parents_as_ids)
-
- # Query for projects[0] parents_as_ids
- r = self.get(
- '/projects/%(project_id)s?parents_as_ids' % {
- 'project_id': projects[0]['project']['id']})
-
- self.assertValidProjectResponse(r, projects[0]['project'])
- parents_as_ids = r.result['project']['parents']
-
- # projects[0] has no parents, parents_as_ids must be None
- self.assertIsNone(parents_as_ids)
-
- def test_get_project_with_parents_as_list_with_full_access(self):
- """``GET /projects/{project_id}?parents_as_list`` with full access.
-
- Test plan:
-
- - Create 'parent', 'project' and 'subproject' projects;
- - Assign a user a role on each one of those projects;
- - Check that calling parents_as_list on 'subproject' returns both
- 'project' and 'parent'.
-
- """
-
- # Create the project hierarchy
- parent, project, subproject = self._create_projects_hierarchy(2)
-
- # Assign a role for the user on all the created projects
- for proj in (parent, project, subproject):
- self.put(self.build_role_assignment_link(
- role_id=self.role_id, user_id=self.user_id,
- project_id=proj['project']['id']))
-
- # Make the API call
- r = self.get('/projects/%(project_id)s?parents_as_list' %
- {'project_id': subproject['project']['id']})
- self.assertValidProjectResponse(r, subproject['project'])
-
- # Assert only 'project' and 'parent' are in the parents list
- self.assertIn(project, r.result['project']['parents'])
- self.assertIn(parent, r.result['project']['parents'])
- self.assertEqual(2, len(r.result['project']['parents']))
-
- def test_get_project_with_parents_as_list_with_partial_access(self):
- """``GET /projects/{project_id}?parents_as_list`` with partial access.
-
- Test plan:
-
- - Create 'parent', 'project' and 'subproject' projects;
- - Assign a user a role on 'parent' and 'subproject';
- - Check that calling parents_as_list on 'subproject' only returns
- 'parent'.
-
- """
-
- # Create the project hierarchy
- parent, project, subproject = self._create_projects_hierarchy(2)
-
- # Assign a role for the user on parent and subproject
- for proj in (parent, subproject):
- self.put(self.build_role_assignment_link(
- role_id=self.role_id, user_id=self.user_id,
- project_id=proj['project']['id']))
-
- # Make the API call
- r = self.get('/projects/%(project_id)s?parents_as_list' %
- {'project_id': subproject['project']['id']})
- self.assertValidProjectResponse(r, subproject['project'])
-
- # Assert only 'parent' is in the parents list
- self.assertIn(parent, r.result['project']['parents'])
- self.assertEqual(1, len(r.result['project']['parents']))
-
- def test_get_project_with_parents_as_list_and_parents_as_ids(self):
- """Call ``GET /projects/{project_id}?parents_as_list&parents_as_ids``.
-
- """
- projects = self._create_projects_hierarchy(hierarchy_size=2)
-
- self.get(
- '/projects/%(project_id)s?parents_as_list&parents_as_ids' % {
- 'project_id': projects[1]['project']['id']},
- expected_status=http_client.BAD_REQUEST)
-
- def test_get_project_with_subtree_as_ids(self):
- """Call ``GET /projects/{project_id}?subtree_as_ids``.
-
- This test creates a more complex hierarchy to test if the structured
- dictionary returned by using the ``subtree_as_ids`` query param
- correctly represents the hierarchy.
-
- The hierarchy contains 5 projects with the following structure::
-
- +--A--+
- | |
- +--B--+ C
- | |
- D E
-
-
- """
- projects = self._create_projects_hierarchy(hierarchy_size=2)
-
- # Add another child to projects[0] - it will be projects[3]
- new_ref = self.new_project_ref(
- domain_id=self.domain_id,
- parent_id=projects[0]['project']['id'])
- resp = self.post('/projects',
- body={'project': new_ref})
- self.assertValidProjectResponse(resp, new_ref)
- projects.append(resp.result)
-
- # Add another child to projects[1] - it will be projects[4]
- new_ref = self.new_project_ref(
- domain_id=self.domain_id,
- parent_id=projects[1]['project']['id'])
- resp = self.post('/projects',
- body={'project': new_ref})
- self.assertValidProjectResponse(resp, new_ref)
- projects.append(resp.result)
-
- # Query for projects[0] subtree_as_ids
- r = self.get(
- '/projects/%(project_id)s?subtree_as_ids' % {
- 'project_id': projects[0]['project']['id']})
- self.assertValidProjectResponse(r, projects[0]['project'])
- subtree_as_ids = r.result['project']['subtree']
-
- # The subtree hierarchy from projects[0] should have the following
- # structure:
- # {
- # projects[1]: {
- # projects[2]: None,
- # projects[4]: None
- # },
- # projects[3]: None
- # }
- expected_dict = {
- projects[1]['project']['id']: {
- projects[2]['project']['id']: None,
- projects[4]['project']['id']: None
- },
- projects[3]['project']['id']: None
- }
- self.assertDictEqual(expected_dict, subtree_as_ids)
-
- # Now query for projects[1] subtree_as_ids
- r = self.get(
- '/projects/%(project_id)s?subtree_as_ids' % {
- 'project_id': projects[1]['project']['id']})
- self.assertValidProjectResponse(r, projects[1]['project'])
- subtree_as_ids = r.result['project']['subtree']
-
- # The subtree hierarchy from projects[1] should have the following
- # structure:
- # {
- # projects[2]: None,
- # projects[4]: None
- # }
- expected_dict = {
- projects[2]['project']['id']: None,
- projects[4]['project']['id']: None
- }
- self.assertDictEqual(expected_dict, subtree_as_ids)
-
- # Now query for projects[3] subtree_as_ids
- r = self.get(
- '/projects/%(project_id)s?subtree_as_ids' % {
- 'project_id': projects[3]['project']['id']})
- self.assertValidProjectResponse(r, projects[3]['project'])
- subtree_as_ids = r.result['project']['subtree']
-
- # projects[3] has no subtree, subtree_as_ids must be None
- self.assertIsNone(subtree_as_ids)
-
- def test_get_project_with_subtree_as_list_with_full_access(self):
- """``GET /projects/{project_id}?subtree_as_list`` with full access.
-
- Test plan:
-
- - Create 'parent', 'project' and 'subproject' projects;
- - Assign a user a role on each one of those projects;
- - Check that calling subtree_as_list on 'parent' returns both 'parent'
- and 'subproject'.
-
- """
-
- # Create the project hierarchy
- parent, project, subproject = self._create_projects_hierarchy(2)
-
- # Assign a role for the user on all the created projects
- for proj in (parent, project, subproject):
- self.put(self.build_role_assignment_link(
- role_id=self.role_id, user_id=self.user_id,
- project_id=proj['project']['id']))
-
- # Make the API call
- r = self.get('/projects/%(project_id)s?subtree_as_list' %
- {'project_id': parent['project']['id']})
- self.assertValidProjectResponse(r, parent['project'])
-
- # Assert only 'project' and 'subproject' are in the subtree
- self.assertIn(project, r.result['project']['subtree'])
- self.assertIn(subproject, r.result['project']['subtree'])
- self.assertEqual(2, len(r.result['project']['subtree']))
-
- def test_get_project_with_subtree_as_list_with_partial_access(self):
- """``GET /projects/{project_id}?subtree_as_list`` with partial access.
-
- Test plan:
-
- - Create 'parent', 'project' and 'subproject' projects;
- - Assign a user a role on 'parent' and 'subproject';
- - Check that calling subtree_as_list on 'parent' returns 'subproject'.
-
- """
-
- # Create the project hierarchy
- parent, project, subproject = self._create_projects_hierarchy(2)
-
- # Assign a role for the user on parent and subproject
- for proj in (parent, subproject):
- self.put(self.build_role_assignment_link(
- role_id=self.role_id, user_id=self.user_id,
- project_id=proj['project']['id']))
-
- # Make the API call
- r = self.get('/projects/%(project_id)s?subtree_as_list' %
- {'project_id': parent['project']['id']})
- self.assertValidProjectResponse(r, parent['project'])
-
- # Assert only 'subproject' is in the subtree
- self.assertIn(subproject, r.result['project']['subtree'])
- self.assertEqual(1, len(r.result['project']['subtree']))
-
- def test_get_project_with_subtree_as_list_and_subtree_as_ids(self):
- """Call ``GET /projects/{project_id}?subtree_as_list&subtree_as_ids``.
-
- """
- projects = self._create_projects_hierarchy(hierarchy_size=2)
-
- self.get(
- '/projects/%(project_id)s?subtree_as_list&subtree_as_ids' % {
- 'project_id': projects[1]['project']['id']},
- expected_status=http_client.BAD_REQUEST)
-
- def test_update_project(self):
- """Call ``PATCH /projects/{project_id}``."""
- ref = self.new_project_ref(domain_id=self.domain_id)
- del ref['id']
- r = self.patch(
- '/projects/%(project_id)s' % {
- 'project_id': self.project_id},
- body={'project': ref})
- self.assertValidProjectResponse(r, ref)
-
- def test_update_project_domain_id(self):
- """Call ``PATCH /projects/{project_id}`` with domain_id."""
- project = self.new_project_ref(domain_id=self.domain['id'])
- self.resource_api.create_project(project['id'], project)
- project['domain_id'] = CONF.identity.default_domain_id
- r = self.patch('/projects/%(project_id)s' % {
- 'project_id': project['id']},
- body={'project': project},
- expected_status=exception.ValidationError.code)
- self.config_fixture.config(domain_id_immutable=False)
- project['domain_id'] = self.domain['id']
- r = self.patch('/projects/%(project_id)s' % {
- 'project_id': project['id']},
- body={'project': project})
- self.assertValidProjectResponse(r, project)
-
- def test_update_project_parent_id(self):
- """Call ``PATCH /projects/{project_id}``."""
- projects = self._create_projects_hierarchy()
- leaf_project = projects[1]['project']
- leaf_project['parent_id'] = None
- self.patch(
- '/projects/%(project_id)s' % {
- 'project_id': leaf_project['id']},
- body={'project': leaf_project},
- expected_status=http_client.FORBIDDEN)
-
- def test_update_project_is_domain_not_allowed(self):
- """Call ``PATCH /projects/{project_id}`` with is_domain.
-
- The is_domain flag is immutable.
- """
- project = self.new_project_ref(domain_id=self.domain['id'])
- resp = self.post('/projects',
- body={'project': project})
- self.assertFalse(resp.result['project']['is_domain'])
-
- project['is_domain'] = True
- self.patch('/projects/%(project_id)s' % {
- 'project_id': resp.result['project']['id']},
- body={'project': project},
- expected_status=http_client.BAD_REQUEST)
-
- def test_disable_leaf_project(self):
- """Call ``PATCH /projects/{project_id}``."""
- projects = self._create_projects_hierarchy()
- leaf_project = projects[1]['project']
- leaf_project['enabled'] = False
- r = self.patch(
- '/projects/%(project_id)s' % {
- 'project_id': leaf_project['id']},
- body={'project': leaf_project})
- self.assertEqual(
- leaf_project['enabled'], r.result['project']['enabled'])
-
- def test_disable_not_leaf_project(self):
- """Call ``PATCH /projects/{project_id}``."""
- projects = self._create_projects_hierarchy()
- root_project = projects[0]['project']
- root_project['enabled'] = False
- self.patch(
- '/projects/%(project_id)s' % {
- 'project_id': root_project['id']},
- body={'project': root_project},
- expected_status=http_client.FORBIDDEN)
-
- def test_delete_project(self):
- """Call ``DELETE /projects/{project_id}``
-
- As well as making sure the delete succeeds, we ensure
- that any credentials that reference this projects are
- also deleted, while other credentials are unaffected.
-
- """
- # First check the credential for this project is present
- r = self.credential_api.get_credential(self.credential['id'])
- self.assertDictEqual(r, self.credential)
- # Create a second credential with a different project
- self.project2 = self.new_project_ref(
- domain_id=self.domain['id'])
- self.resource_api.create_project(self.project2['id'], self.project2)
- self.credential2 = self.new_credential_ref(
- user_id=self.user['id'],
- project_id=self.project2['id'])
- self.credential_api.create_credential(
- self.credential2['id'],
- self.credential2)
-
- # Now delete the project
- self.delete(
- '/projects/%(project_id)s' % {
- 'project_id': self.project_id})
-
- # Deleting the project should have deleted any credentials
- # that reference this project
- self.assertRaises(exception.CredentialNotFound,
- self.credential_api.get_credential,
- credential_id=self.credential['id'])
- # But the credential for project2 is unaffected
- r = self.credential_api.get_credential(self.credential2['id'])
- self.assertDictEqual(r, self.credential2)
-
- def test_delete_not_leaf_project(self):
- """Call ``DELETE /projects/{project_id}``."""
- projects = self._create_projects_hierarchy()
- self.delete(
- '/projects/%(project_id)s' % {
- 'project_id': projects[0]['project']['id']},
- expected_status=http_client.FORBIDDEN)
-
# Role CRUD tests
def test_create_role(self):
"""Call ``POST /roles``."""
- ref = self.new_role_ref()
+ ref = unit.new_role_ref()
r = self.post(
'/roles',
body={'role': ref})
@@ -1090,7 +66,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
def test_update_role(self):
"""Call ``PATCH /roles/{role_id}``."""
- ref = self.new_role_ref()
+ ref = unit.new_role_ref()
del ref['id']
r = self.patch('/roles/%(role_id)s' % {
'role_id': self.role_id},
@@ -1105,8 +81,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
def test_create_member_role(self):
"""Call ``POST /roles``."""
# specify only the name on creation
- ref = self.new_role_ref()
- ref['name'] = CONF.member_role_name
+ ref = unit.new_role_ref(name=CONF.member_role_name)
r = self.post(
'/roles',
body={'role': ref})
@@ -1118,35 +93,41 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
# Role Grants tests
def test_crud_user_project_role_grants(self):
+ role = unit.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+
collection_url = (
'/projects/%(project_id)s/users/%(user_id)s/roles' % {
'project_id': self.project['id'],
'user_id': self.user['id']})
member_url = '%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
- 'role_id': self.role_id}
+ 'role_id': role['id']}
+
+ # There is a role assignment for self.user on self.project
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role,
+ expected_length=1)
self.put(member_url)
self.head(member_url)
r = self.get(collection_url)
- self.assertValidRoleListResponse(r, ref=self.role,
- resource_url=collection_url)
+ self.assertValidRoleListResponse(r, ref=role,
+ resource_url=collection_url,
+ expected_length=2)
- # FIXME(gyee): this test is no longer valid as user
- # have no role in the project. Can't get a scoped token
- # self.delete(member_url)
- # r = self.get(collection_url)
- # self.assertValidRoleListResponse(r, expected_length=0)
- # self.assertIn(collection_url, r.result['links']['self'])
+ self.delete(member_url)
+ r = self.get(collection_url)
+ self.assertValidRoleListResponse(r, ref=self.role, expected_length=1)
+ self.assertIn(collection_url, r.result['links']['self'])
def test_crud_user_project_role_grants_no_user(self):
- """Grant role on a project to a user that doesn't exist, 404 result.
+ """Grant role on a project to a user that doesn't exist.
When grant a role on a project to a user that doesn't exist, the server
returns Not Found for the user.
"""
-
user_id = uuid.uuid4().hex
collection_url = (
@@ -1179,13 +160,12 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
resource_url=collection_url)
def test_crud_user_domain_role_grants_no_user(self):
- """Grant role on a domain to a user that doesn't exist, 404 result.
+ """Grant role on a domain to a user that doesn't exist.
When grant a role on a domain to a user that doesn't exist, the server
returns 404 Not Found for the user.
"""
-
user_id = uuid.uuid4().hex
collection_url = (
@@ -1218,13 +198,12 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
resource_url=collection_url)
def test_crud_group_project_role_grants_no_group(self):
- """Grant role on a project to a group that doesn't exist, 404 result.
+ """Grant role on a project to a group that doesn't exist.
When grant a role on a project to a group that doesn't exist, the
server returns 404 Not Found for the group.
"""
-
group_id = uuid.uuid4().hex
collection_url = (
@@ -1258,13 +237,12 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
resource_url=collection_url)
def test_crud_group_domain_role_grants_no_group(self):
- """Grant role on a domain to a group that doesn't exist, 404 result.
+ """Grant role on a domain to a group that doesn't exist.
When grant a role on a domain to a group that doesn't exist, the server
returns 404 Not Found for the group.
"""
-
group_id = uuid.uuid4().hex
collection_url = (
@@ -1280,7 +258,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
def _create_new_user_and_assign_role_on_project(self):
"""Create a new user and assign user a role on a project."""
# Create a new user
- new_user = self.new_user_ref(domain_id=self.domain_id)
+ new_user = unit.new_user_ref(domain_id=self.domain_id)
user_ref = self.identity_api.create_user(new_user)
# Assign the user a role on the project
collection_url = (
@@ -1290,9 +268,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
member_url = ('%(collection_url)s/%(role_id)s' % {
'collection_url': collection_url,
'role_id': self.role_id})
- self.put(member_url, expected_status=204)
+ self.put(member_url)
# Check the user has the role assigned
- self.head(member_url, expected_status=204)
+ self.head(member_url)
return member_url, user_ref
def test_delete_user_before_removing_role_assignment_succeeds(self):
@@ -1301,7 +279,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
# Delete the user from identity backend
self.identity_api.driver.delete_user(user['id'])
# Clean up the role assignment
- self.delete(member_url, expected_status=204)
+ self.delete(member_url)
# Make sure the role is gone
self.head(member_url, expected_status=http_client.NOT_FOUND)
@@ -1310,8 +288,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
member_url, user = self._create_new_user_and_assign_role_on_project()
# Delete the user from identity backend
self.identity_api.delete_user(user['id'])
- # We should get a 404 when looking for the user in the identity
- # backend because we're not performing a delete operation on the role.
+ # We should get a 404 Not Found when looking for the user in the
+ # identity backend because we're not performing a delete operation on
+ # the role.
self.head(member_url, expected_status=http_client.NOT_FOUND)
def test_token_revoked_once_group_role_grant_revoked(self):
@@ -1344,7 +323,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
# validates the returned token; it should be valid.
self.head('/auth/tokens',
headers={'x-subject-token': token},
- expected_status=200)
+ expected_status=http_client.OK)
# revokes the grant from group on project.
self.assignment_api.delete_grant(role_id=self.role['id'],
@@ -1356,6 +335,126 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
headers={'x-subject-token': token},
expected_status=http_client.NOT_FOUND)
+ @unit.skip_if_cache_disabled('assignment')
+ def test_delete_grant_from_user_and_project_invalidate_cache(self):
+ # create a new project
+ new_project = unit.new_project_ref(domain_id=self.domain_id)
+ self.resource_api.create_project(new_project['id'], new_project)
+
+ collection_url = (
+ '/projects/%(project_id)s/users/%(user_id)s/roles' % {
+ 'project_id': new_project['id'],
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ # create the user a grant on the new project
+ self.put(member_url)
+
+ # check the grant that was just created
+ self.head(member_url)
+ resp = self.get(collection_url)
+ self.assertValidRoleListResponse(resp, ref=self.role,
+ resource_url=collection_url)
+
+ # delete the grant
+ self.delete(member_url)
+
+ # get the collection and ensure there are no roles on the project
+ resp = self.get(collection_url)
+ self.assertListEqual(resp.json_body['roles'], [])
+
+ @unit.skip_if_cache_disabled('assignment')
+ def test_delete_grant_from_user_and_domain_invalidates_cache(self):
+ # create a new domain
+ new_domain = unit.new_domain_ref()
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+
+ collection_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': new_domain['id'],
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ # create the user a grant on the new domain
+ self.put(member_url)
+
+ # check the grant that was just created
+ self.head(member_url)
+ resp = self.get(collection_url)
+ self.assertValidRoleListResponse(resp, ref=self.role,
+ resource_url=collection_url)
+
+ # delete the grant
+ self.delete(member_url)
+
+ # get the collection and ensure there are no roles on the domain
+ resp = self.get(collection_url)
+ self.assertListEqual(resp.json_body['roles'], [])
+
+ @unit.skip_if_cache_disabled('assignment')
+ def test_delete_grant_from_group_and_project_invalidates_cache(self):
+ # create a new project
+ new_project = unit.new_project_ref(domain_id=self.domain_id)
+ self.resource_api.create_project(new_project['id'], new_project)
+
+ collection_url = (
+ '/projects/%(project_id)s/groups/%(group_id)s/roles' % {
+ 'project_id': new_project['id'],
+ 'group_id': self.group['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ # create the group a grant on the new project
+ self.put(member_url)
+
+ # check the grant that was just created
+ self.head(member_url)
+ resp = self.get(collection_url)
+ self.assertValidRoleListResponse(resp, ref=self.role,
+ resource_url=collection_url)
+
+ # delete the grant
+ self.delete(member_url)
+
+ # get the collection and ensure there are no roles on the project
+ resp = self.get(collection_url)
+ self.assertListEqual(resp.json_body['roles'], [])
+
+ @unit.skip_if_cache_disabled('assignment')
+ def test_delete_grant_from_group_and_domain_invalidates_cache(self):
+ # create a new domain
+ new_domain = unit.new_domain_ref()
+ self.resource_api.create_domain(new_domain['id'], new_domain)
+
+ collection_url = (
+ '/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
+ 'domain_id': new_domain['id'],
+ 'group_id': self.group['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+
+ # create the group a grant on the new domain
+ self.put(member_url)
+
+ # check the grant that was just created
+ self.head(member_url)
+ resp = self.get(collection_url)
+ self.assertValidRoleListResponse(resp, ref=self.role,
+ resource_url=collection_url)
+
+ # delete the grant
+ self.delete(member_url)
+
+ # get the collection and ensure there are no roles on the domain
+ resp = self.get(collection_url)
+ self.assertListEqual(resp.json_body['roles'], [])
+
# Role Assignments tests
def test_get_role_assignments(self):
@@ -1384,13 +483,11 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
been removed
"""
-
# Since the default fixtures already assign some roles to the
# user it creates, we also need a new user that will not have any
# existing assignments
- self.user1 = self.new_user_ref(
- domain_id=self.domain['id'])
- self.user1 = self.identity_api.create_user(self.user1)
+ user1 = unit.new_user_ref(domain_id=self.domain['id'])
+ user1 = self.identity_api.create_user(user1)
collection_url = '/role_assignments'
r = self.get(collection_url)
@@ -1412,7 +509,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
self.assertRoleAssignmentInListResponse(r, gd_entity)
ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
- user_id=self.user1['id'],
+ user_id=user1['id'],
role_id=self.role_id)
self.put(ud_entity['links']['assignment'])
r = self.get(collection_url)
@@ -1434,7 +531,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
self.assertRoleAssignmentInListResponse(r, gp_entity)
up_entity = self.build_role_assignment_entity(
- project_id=self.project_id, user_id=self.user1['id'],
+ project_id=self.project_id, user_id=user1['id'],
role_id=self.role_id)
self.put(up_entity['links']['assignment'])
r = self.get(collection_url)
@@ -1475,18 +572,13 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
for each of the group members.
"""
- self.user1 = self.new_user_ref(
- domain_id=self.domain['id'])
- password = self.user1['password']
- self.user1 = self.identity_api.create_user(self.user1)
- self.user1['password'] = password
- self.user2 = self.new_user_ref(
- domain_id=self.domain['id'])
- password = self.user2['password']
- self.user2 = self.identity_api.create_user(self.user2)
- self.user2['password'] = password
- self.identity_api.add_user_to_group(self.user1['id'], self.group['id'])
- self.identity_api.add_user_to_group(self.user2['id'], self.group['id'])
+ user1 = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+ user2 = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+
+ self.identity_api.add_user_to_group(user1['id'], self.group['id'])
+ self.identity_api.add_user_to_group(user2['id'], self.group['id'])
collection_url = '/role_assignments'
r = self.get(collection_url)
@@ -1516,11 +608,11 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
resource_url=collection_url)
ud_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], domain_id=self.domain_id,
- user_id=self.user1['id'], role_id=self.role_id)
+ user_id=user1['id'], role_id=self.role_id)
self.assertRoleAssignmentInListResponse(r, ud_entity)
ud_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], domain_id=self.domain_id,
- user_id=self.user2['id'], role_id=self.role_id)
+ user_id=user2['id'], role_id=self.role_id)
self.assertRoleAssignmentInListResponse(r, ud_entity)
def test_check_effective_values_for_role_assignments(self):
@@ -1549,18 +641,13 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
know if we are getting effective roles or not
"""
- self.user1 = self.new_user_ref(
- domain_id=self.domain['id'])
- password = self.user1['password']
- self.user1 = self.identity_api.create_user(self.user1)
- self.user1['password'] = password
- self.user2 = self.new_user_ref(
- domain_id=self.domain['id'])
- password = self.user2['password']
- self.user2 = self.identity_api.create_user(self.user2)
- self.user2['password'] = password
- self.identity_api.add_user_to_group(self.user1['id'], self.group['id'])
- self.identity_api.add_user_to_group(self.user2['id'], self.group['id'])
+ user1 = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+ user2 = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+
+ self.identity_api.add_user_to_group(user1['id'], self.group['id'])
+ self.identity_api.add_user_to_group(user2['id'], self.group['id'])
collection_url = '/role_assignments'
r = self.get(collection_url)
@@ -1633,61 +720,53 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
token (all effective roles for a user on a project)
"""
-
# Since the default fixtures already assign some roles to the
# user it creates, we also need a new user that will not have any
# existing assignments
- self.user1 = self.new_user_ref(
- domain_id=self.domain['id'])
- password = self.user1['password']
- self.user1 = self.identity_api.create_user(self.user1)
- self.user1['password'] = password
- self.user2 = self.new_user_ref(
- domain_id=self.domain['id'])
- password = self.user2['password']
- self.user2 = self.identity_api.create_user(self.user2)
- self.user2['password'] = password
- self.group1 = self.new_group_ref(
- domain_id=self.domain['id'])
- self.group1 = self.identity_api.create_group(self.group1)
- self.identity_api.add_user_to_group(self.user1['id'],
- self.group1['id'])
- self.identity_api.add_user_to_group(self.user2['id'],
- self.group1['id'])
- self.project1 = self.new_project_ref(
- domain_id=self.domain['id'])
- self.resource_api.create_project(self.project1['id'], self.project1)
- self.role1 = self.new_role_ref()
+ user1 = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+ user2 = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+
+ group1 = unit.new_group_ref(domain_id=self.domain['id'])
+ group1 = self.identity_api.create_group(group1)
+ self.identity_api.add_user_to_group(user1['id'], group1['id'])
+ self.identity_api.add_user_to_group(user2['id'], group1['id'])
+ project1 = unit.new_project_ref(domain_id=self.domain['id'])
+ self.resource_api.create_project(project1['id'], project1)
+ self.role1 = unit.new_role_ref()
self.role_api.create_role(self.role1['id'], self.role1)
- self.role2 = self.new_role_ref()
+ self.role2 = unit.new_role_ref()
self.role_api.create_role(self.role2['id'], self.role2)
# Now add one of each of the four types of assignment
gd_entity = self.build_role_assignment_entity(
- domain_id=self.domain_id, group_id=self.group1['id'],
+ domain_id=self.domain_id, group_id=group1['id'],
role_id=self.role1['id'])
self.put(gd_entity['links']['assignment'])
ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
- user_id=self.user1['id'],
+ user_id=user1['id'],
role_id=self.role2['id'])
self.put(ud_entity['links']['assignment'])
gp_entity = self.build_role_assignment_entity(
- project_id=self.project1['id'], group_id=self.group1['id'],
+ project_id=project1['id'],
+ group_id=group1['id'],
role_id=self.role1['id'])
self.put(gp_entity['links']['assignment'])
up_entity = self.build_role_assignment_entity(
- project_id=self.project1['id'], user_id=self.user1['id'],
+ project_id=project1['id'],
+ user_id=user1['id'],
role_id=self.role2['id'])
self.put(up_entity['links']['assignment'])
# Now list by various filters to make sure we get back the right ones
collection_url = ('/role_assignments?scope.project.id=%s' %
- self.project1['id'])
+ project1['id'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
@@ -1704,7 +783,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
self.assertRoleAssignmentInListResponse(r, ud_entity)
self.assertRoleAssignmentInListResponse(r, gd_entity)
- collection_url = '/role_assignments?user.id=%s' % self.user1['id']
+ collection_url = '/role_assignments?user.id=%s' % user1['id']
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
@@ -1712,7 +791,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, ud_entity)
- collection_url = '/role_assignments?group.id=%s' % self.group1['id']
+ collection_url = '/role_assignments?group.id=%s' % group1['id']
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
@@ -1733,8 +812,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
collection_url = (
'/role_assignments?user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
- 'user_id': self.user1['id'],
- 'project_id': self.project1['id']})
+ 'user_id': user1['id'],
+ 'project_id': project1['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=1,
@@ -1746,7 +825,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
# assigned as well as by virtue of group membership
collection_url = ('/role_assignments?effective&user.id=%s' %
- self.user1['id'])
+ user1['id'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=4,
@@ -1756,17 +835,18 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
self.assertRoleAssignmentInListResponse(r, ud_entity)
# ...and the two via group membership...
gp1_link = self.build_role_assignment_link(
- project_id=self.project1['id'], group_id=self.group1['id'],
+ project_id=project1['id'],
+ group_id=group1['id'],
role_id=self.role1['id'])
gd1_link = self.build_role_assignment_link(domain_id=self.domain_id,
- group_id=self.group1['id'],
+ group_id=group1['id'],
role_id=self.role1['id'])
up1_entity = self.build_role_assignment_entity(
- link=gp1_link, project_id=self.project1['id'],
- user_id=self.user1['id'], role_id=self.role1['id'])
+ link=gp1_link, project_id=project1['id'],
+ user_id=user1['id'], role_id=self.role1['id'])
ud1_entity = self.build_role_assignment_entity(
- link=gd1_link, domain_id=self.domain_id, user_id=self.user1['id'],
+ link=gd1_link, domain_id=self.domain_id, user_id=user1['id'],
role_id=self.role1['id'])
self.assertRoleAssignmentInListResponse(r, up1_entity)
self.assertRoleAssignmentInListResponse(r, ud1_entity)
@@ -1778,8 +858,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase,
collection_url = (
'/role_assignments?effective&user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
- 'user_id': self.user1['id'],
- 'project_id': self.project1['id']})
+ 'user_id': user1['id'],
+ 'project_id': project1['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
@@ -1804,7 +884,7 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
"""
def create_project_hierarchy(parent_id, depth):
- "Creates a random project hierarchy."
+ """Creates a random project hierarchy."""
if depth == 0:
return
@@ -1812,7 +892,7 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
subprojects = []
for i in range(breadth):
- subprojects.append(self.new_project_ref(
+ subprojects.append(unit.new_project_ref(
domain_id=self.domain_id, parent_id=parent_id))
self.resource_api.create_project(subprojects[-1]['id'],
subprojects[-1])
@@ -1823,12 +903,12 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
super(RoleAssignmentBaseTestCase, self).load_sample_data()
# Create a domain
- self.domain = self.new_domain_ref()
+ self.domain = unit.new_domain_ref()
self.domain_id = self.domain['id']
self.resource_api.create_domain(self.domain_id, self.domain)
# Create a project hierarchy
- self.project = self.new_project_ref(domain_id=self.domain_id)
+ self.project = unit.new_project_ref(domain_id=self.domain_id)
self.project_id = self.project['id']
self.resource_api.create_project(self.project_id, self.project)
@@ -1839,14 +919,14 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
# Create 3 users
self.user_ids = []
for i in range(3):
- user = self.new_user_ref(domain_id=self.domain_id)
+ user = unit.new_user_ref(domain_id=self.domain_id)
user = self.identity_api.create_user(user)
self.user_ids.append(user['id'])
# Create 3 groups
self.group_ids = []
for i in range(3):
- group = self.new_group_ref(domain_id=self.domain_id)
+ group = unit.new_group_ref(domain_id=self.domain_id)
group = self.identity_api.create_group(group)
self.group_ids.append(group['id'])
@@ -1861,7 +941,7 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
role_id=self.role_id)
# Create a role
- self.role = self.new_role_ref()
+ self.role = unit.new_role_ref()
self.role_id = self.role['id']
self.role_api.create_role(self.role_id, self.role)
@@ -1869,7 +949,7 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
self.default_user_id = self.user_ids[0]
self.default_group_id = self.group_ids[0]
- def get_role_assignments(self, expected_status=200, **filters):
+ def get_role_assignments(self, expected_status=http_client.OK, **filters):
"""Returns the result from querying role assignment API + queried URL.
Calls GET /v3/role_assignments?<params> and returns its result, where
@@ -1880,7 +960,6 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
queried URL.
"""
-
query_url = self._get_role_assignments_query_url(**filters)
response = self.get(query_url, expected_status=expected_status)
@@ -1903,11 +982,11 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase):
"""Class for testing invalid query params on /v3/role_assignments API.
- Querying domain and project, or user and group results in a HTTP 400, since
- a role assignment must contain only a single pair of (actor, target). In
- addition, since filtering on role assignments applies only to the final
- result, effective mode cannot be combined with i) group or ii) domain and
- inherited, because it would always result in an empty list.
+ Querying domain and project, or user and group results in a HTTP 400 Bad
+ Request, since a role assignment must contain only a single pair of (actor,
+ target). In addition, since filtering on role assignments applies only to
+ the final result, effective mode cannot be combined with i) group or ii)
+ domain and inherited, because it would always result in an empty list.
"""
@@ -1959,7 +1038,6 @@ class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase):
group_id, user_id and inherited_to_projects.
"""
-
# Fills default assignment with provided filters
test_assignment = self._set_default_assignment_attributes(**filters)
@@ -2188,10 +1266,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
def test_get_token_from_inherited_user_domain_role_grants(self):
# Create a new user to ensure that no grant is loaded from sample data
- user = self.new_user_ref(domain_id=self.domain_id)
- password = user['password']
- user = self.identity_api.create_user(user)
- user['password'] = password
+ user = unit.create_user(self.identity_api, domain_id=self.domain_id)
# Define domain and project authentication data
domain_auth_data = self.build_authentication_request(
@@ -2204,10 +1279,10 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
project_id=self.project_id)
# Check the user cannot get a domain nor a project token
- self.v3_authenticate_token(domain_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_authenticate_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(domain_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
# Grant non-inherited role for user on domain
non_inher_ud_link = self.build_role_assignment_link(
@@ -2215,12 +1290,12 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
self.put(non_inher_ud_link)
# Check the user can get only a domain token
- self.v3_authenticate_token(domain_auth_data)
- self.v3_authenticate_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(domain_auth_data)
+ self.v3_create_token(project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
# Create inherited role
- inherited_role = {'id': uuid.uuid4().hex, 'name': 'inherited'}
+ inherited_role = unit.new_role_ref(name='inherited')
self.role_api.create_role(inherited_role['id'], inherited_role)
# Grant inherited role for user on domain
@@ -2230,33 +1305,30 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
self.put(inher_ud_link)
# Check the user can get both a domain and a project token
- self.v3_authenticate_token(domain_auth_data)
- self.v3_authenticate_token(project_auth_data)
+ self.v3_create_token(domain_auth_data)
+ self.v3_create_token(project_auth_data)
# Delete inherited grant
self.delete(inher_ud_link)
# Check the user can only get a domain token
- self.v3_authenticate_token(domain_auth_data)
- self.v3_authenticate_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(domain_auth_data)
+ self.v3_create_token(project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
# Delete non-inherited grant
self.delete(non_inher_ud_link)
# Check the user cannot get a domain token anymore
- self.v3_authenticate_token(domain_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(domain_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_get_token_from_inherited_group_domain_role_grants(self):
# Create a new group and put a new user in it to
# ensure that no grant is loaded from sample data
- user = self.new_user_ref(domain_id=self.domain_id)
- password = user['password']
- user = self.identity_api.create_user(user)
- user['password'] = password
+ user = unit.create_user(self.identity_api, domain_id=self.domain_id)
- group = self.new_group_ref(domain_id=self.domain['id'])
+ group = unit.new_group_ref(domain_id=self.domain['id'])
group = self.identity_api.create_group(group)
self.identity_api.add_user_to_group(user['id'], group['id'])
@@ -2271,10 +1343,10 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
project_id=self.project_id)
# Check the user cannot get a domain nor a project token
- self.v3_authenticate_token(domain_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_authenticate_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(domain_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
# Grant non-inherited role for user on domain
non_inher_gd_link = self.build_role_assignment_link(
@@ -2282,12 +1354,12 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
self.put(non_inher_gd_link)
# Check the user can get only a domain token
- self.v3_authenticate_token(domain_auth_data)
- self.v3_authenticate_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(domain_auth_data)
+ self.v3_create_token(project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
# Create inherited role
- inherited_role = {'id': uuid.uuid4().hex, 'name': 'inherited'}
+ inherited_role = unit.new_role_ref(name='inherited')
self.role_api.create_role(inherited_role['id'], inherited_role)
# Grant inherited role for user on domain
@@ -2297,27 +1369,27 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
self.put(inher_gd_link)
# Check the user can get both a domain and a project token
- self.v3_authenticate_token(domain_auth_data)
- self.v3_authenticate_token(project_auth_data)
+ self.v3_create_token(domain_auth_data)
+ self.v3_create_token(project_auth_data)
# Delete inherited grant
self.delete(inher_gd_link)
# Check the user can only get a domain token
- self.v3_authenticate_token(domain_auth_data)
- self.v3_authenticate_token(project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(domain_auth_data)
+ self.v3_create_token(project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
# Delete non-inherited grant
self.delete(non_inher_gd_link)
# Check the user cannot get a domain token anymore
- self.v3_authenticate_token(domain_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(domain_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def _test_crud_inherited_and_direct_assignment_on_target(self, target_url):
# Create a new role to avoid assignments loaded from sample data
- role = self.new_role_ref()
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
# Define URLs
@@ -2360,7 +1432,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
def test_crud_user_inherited_domain_role_grants(self):
role_list = []
for _ in range(2):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
role_list.append(role)
@@ -2409,22 +1481,16 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
"""
role_list = []
for _ in range(4):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
role_list.append(role)
- domain = self.new_domain_ref()
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
- user1 = self.new_user_ref(
- domain_id=domain['id'])
- password = user1['password']
- user1 = self.identity_api.create_user(user1)
- user1['password'] = password
- project1 = self.new_project_ref(
- domain_id=domain['id'])
+ user1 = unit.create_user(self.identity_api, domain_id=domain['id'])
+ project1 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project1['id'], project1)
- project2 = self.new_project_ref(
- domain_id=domain['id'])
+ project2 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project2['id'], project2)
# Add some roles to the project
self.assignment_api.add_role_to_user_and_project(
@@ -2490,6 +1556,98 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, up_entity)
+ def test_list_role_assignments_include_names(self):
+ """Call ``GET /role_assignments with include names``.
+
+ Test Plan:
+
+ - Create a domain with a group and a user
+ - Create a project with a group and a user
+
+ """
+ role1 = unit.new_role_ref()
+ self.role_api.create_role(role1['id'], role1)
+ user1 = unit.create_user(self.identity_api, domain_id=self.domain_id)
+ group = unit.new_group_ref(domain_id=self.domain_id)
+ group = self.identity_api.create_group(group)
+ project1 = unit.new_project_ref(domain_id=self.domain_id)
+ self.resource_api.create_project(project1['id'], project1)
+
+ expected_entity1 = self.build_role_assignment_entity_include_names(
+ role_ref=role1,
+ project_ref=project1,
+ user_ref=user1)
+ self.put(expected_entity1['links']['assignment'])
+ expected_entity2 = self.build_role_assignment_entity_include_names(
+ role_ref=role1,
+ domain_ref=self.domain,
+ group_ref=group)
+ self.put(expected_entity2['links']['assignment'])
+ expected_entity3 = self.build_role_assignment_entity_include_names(
+ role_ref=role1,
+ domain_ref=self.domain,
+ user_ref=user1)
+ self.put(expected_entity3['links']['assignment'])
+ expected_entity4 = self.build_role_assignment_entity_include_names(
+ role_ref=role1,
+ project_ref=project1,
+ group_ref=group)
+ self.put(expected_entity4['links']['assignment'])
+
+ collection_url_domain = (
+ '/role_assignments?include_names&scope.domain.id=%(domain_id)s' % {
+ 'domain_id': self.domain_id})
+ rs_domain = self.get(collection_url_domain)
+ collection_url_project = (
+ '/role_assignments?include_names&'
+ 'scope.project.id=%(project_id)s' % {
+ 'project_id': project1['id']})
+ rs_project = self.get(collection_url_project)
+ collection_url_group = (
+ '/role_assignments?include_names&group.id=%(group_id)s' % {
+ 'group_id': group['id']})
+ rs_group = self.get(collection_url_group)
+ collection_url_user = (
+ '/role_assignments?include_names&user.id=%(user_id)s' % {
+ 'user_id': user1['id']})
+ rs_user = self.get(collection_url_user)
+ collection_url_role = (
+ '/role_assignments?include_names&role.id=%(role_id)s' % {
+ 'role_id': role1['id']})
+ rs_role = self.get(collection_url_role)
+ # Make sure all entities were created successfully
+ self.assertEqual(rs_domain.status_int, http_client.OK)
+ self.assertEqual(rs_project.status_int, http_client.OK)
+ self.assertEqual(rs_group.status_int, http_client.OK)
+ self.assertEqual(rs_user.status_int, http_client.OK)
+ # Make sure we can get back the correct number of entities
+ self.assertValidRoleAssignmentListResponse(
+ rs_domain,
+ expected_length=2,
+ resource_url=collection_url_domain)
+ self.assertValidRoleAssignmentListResponse(
+ rs_project,
+ expected_length=2,
+ resource_url=collection_url_project)
+ self.assertValidRoleAssignmentListResponse(
+ rs_group,
+ expected_length=2,
+ resource_url=collection_url_group)
+ self.assertValidRoleAssignmentListResponse(
+ rs_user,
+ expected_length=2,
+ resource_url=collection_url_user)
+ self.assertValidRoleAssignmentListResponse(
+ rs_role,
+ expected_length=4,
+ resource_url=collection_url_role)
+ # Verify all types of entities have the correct format
+ self.assertRoleAssignmentInListResponse(rs_domain, expected_entity2)
+ self.assertRoleAssignmentInListResponse(rs_project, expected_entity1)
+ self.assertRoleAssignmentInListResponse(rs_group, expected_entity4)
+ self.assertRoleAssignmentInListResponse(rs_user, expected_entity3)
+ self.assertRoleAssignmentInListResponse(rs_role, expected_entity1)
+
def test_list_role_assignments_for_disabled_inheritance_extension(self):
"""Call ``GET /role_assignments with inherited domain grants``.
@@ -2503,25 +1661,18 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
shows up.
"""
-
role_list = []
for _ in range(4):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
role_list.append(role)
- domain = self.new_domain_ref()
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
- user1 = self.new_user_ref(
- domain_id=domain['id'])
- password = user1['password']
- user1 = self.identity_api.create_user(user1)
- user1['password'] = password
- project1 = self.new_project_ref(
- domain_id=domain['id'])
+ user1 = unit.create_user(self.identity_api, domain_id=domain['id'])
+ project1 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project1['id'], project1)
- project2 = self.new_project_ref(
- domain_id=domain['id'])
+ project2 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project2['id'], project2)
# Add some roles to the project
self.assignment_api.add_role_to_user_and_project(
@@ -2598,34 +1749,23 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
"""
role_list = []
for _ in range(4):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
role_list.append(role)
- domain = self.new_domain_ref()
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
- user1 = self.new_user_ref(
- domain_id=domain['id'])
- password = user1['password']
- user1 = self.identity_api.create_user(user1)
- user1['password'] = password
- user2 = self.new_user_ref(
- domain_id=domain['id'])
- password = user2['password']
- user2 = self.identity_api.create_user(user2)
- user2['password'] = password
- group1 = self.new_group_ref(
- domain_id=domain['id'])
+ user1 = unit.create_user(self.identity_api, domain_id=domain['id'])
+ user2 = unit.create_user(self.identity_api, domain_id=domain['id'])
+ group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = self.identity_api.create_group(group1)
self.identity_api.add_user_to_group(user1['id'],
group1['id'])
self.identity_api.add_user_to_group(user2['id'],
group1['id'])
- project1 = self.new_project_ref(
- domain_id=domain['id'])
+ project1 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project1['id'], project1)
- project2 = self.new_project_ref(
- domain_id=domain['id'])
+ project2 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project2['id'], project2)
# Add some roles to the project
self.assignment_api.add_role_to_user_and_project(
@@ -2704,25 +1844,18 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
"""
role_list = []
for _ in range(5):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
role_list.append(role)
- domain = self.new_domain_ref()
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
- user1 = self.new_user_ref(
- domain_id=domain['id'])
- password = user1['password']
- user1 = self.identity_api.create_user(user1)
- user1['password'] = password
- group1 = self.new_group_ref(
- domain_id=domain['id'])
+ user1 = unit.create_user(self.identity_api, domain_id=domain['id'])
+ group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = self.identity_api.create_group(group1)
- project1 = self.new_project_ref(
- domain_id=domain['id'])
+ project1 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project1['id'], project1)
- project2 = self.new_project_ref(
- domain_id=domain['id'])
+ project2 = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project2['id'], project2)
# Add some spoiler roles to the projects
self.assignment_api.add_role_to_user_and_project(
@@ -2790,17 +1923,17 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
"""
# Create project hierarchy
- root = self.new_project_ref(domain_id=self.domain['id'])
- leaf = self.new_project_ref(domain_id=self.domain['id'],
+ root = unit.new_project_ref(domain_id=self.domain['id'])
+ leaf = unit.new_project_ref(domain_id=self.domain['id'],
parent_id=root['id'])
self.resource_api.create_project(root['id'], root)
self.resource_api.create_project(leaf['id'], leaf)
# Create 'non-inherited' and 'inherited' roles
- non_inherited_role = {'id': uuid.uuid4().hex, 'name': 'non-inherited'}
+ non_inherited_role = unit.new_role_ref(name='non-inherited')
self.role_api.create_role(non_inherited_role['id'], non_inherited_role)
- inherited_role = {'id': uuid.uuid4().hex, 'name': 'inherited'}
+ inherited_role = unit.new_role_ref(name='inherited')
self.role_api.create_role(inherited_role['id'], inherited_role)
return (root['id'], leaf['id'],
@@ -2822,10 +1955,10 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
project_id=leaf_id)
# Check the user cannot get a token on root nor leaf project
- self.v3_authenticate_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_authenticate_token(leaf_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(root_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(leaf_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
# Grant non-inherited role for user on leaf project
non_inher_up_link = self.build_role_assignment_link(
@@ -2834,9 +1967,9 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
self.put(non_inher_up_link)
# Check the user can only get a token on leaf project
- self.v3_authenticate_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_authenticate_token(leaf_project_auth_data)
+ self.v3_create_token(root_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(leaf_project_auth_data)
# Grant inherited role for user on root project
inher_up_link = self.build_role_assignment_link(
@@ -2845,24 +1978,24 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
self.put(inher_up_link)
# Check the user still can get a token only on leaf project
- self.v3_authenticate_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_authenticate_token(leaf_project_auth_data)
+ self.v3_create_token(root_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(leaf_project_auth_data)
# Delete non-inherited grant
self.delete(non_inher_up_link)
# Check the inherited role still applies for leaf project
- self.v3_authenticate_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_authenticate_token(leaf_project_auth_data)
+ self.v3_create_token(root_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(leaf_project_auth_data)
# Delete inherited grant
self.delete(inher_up_link)
# Check the user cannot get a token on leaf project anymore
- self.v3_authenticate_token(leaf_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(leaf_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_get_token_from_inherited_group_project_role_grants(self):
# Create default scenario
@@ -2870,7 +2003,7 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
self._setup_hierarchical_projects_scenario())
# Create group and add user to it
- group = self.new_group_ref(domain_id=self.domain['id'])
+ group = unit.new_group_ref(domain_id=self.domain['id'])
group = self.identity_api.create_group(group)
self.identity_api.add_user_to_group(self.user['id'], group['id'])
@@ -2885,10 +2018,10 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
project_id=leaf_id)
# Check the user cannot get a token on root nor leaf project
- self.v3_authenticate_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_authenticate_token(leaf_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(root_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(leaf_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
# Grant non-inherited role for group on leaf project
non_inher_gp_link = self.build_role_assignment_link(
@@ -2897,9 +2030,9 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
self.put(non_inher_gp_link)
# Check the user can only get a token on leaf project
- self.v3_authenticate_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_authenticate_token(leaf_project_auth_data)
+ self.v3_create_token(root_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(leaf_project_auth_data)
# Grant inherited role for group on root project
inher_gp_link = self.build_role_assignment_link(
@@ -2908,22 +2041,22 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
self.put(inher_gp_link)
# Check the user still can get a token only on leaf project
- self.v3_authenticate_token(root_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
- self.v3_authenticate_token(leaf_project_auth_data)
+ self.v3_create_token(root_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(leaf_project_auth_data)
# Delete non-inherited grant
self.delete(non_inher_gp_link)
# Check the inherited role still applies for leaf project
- self.v3_authenticate_token(leaf_project_auth_data)
+ self.v3_create_token(leaf_project_auth_data)
# Delete inherited grant
self.delete(inher_gp_link)
# Check the user cannot get a token on leaf project anymore
- self.v3_authenticate_token(leaf_project_auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(leaf_project_auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_get_role_assignments_for_project_hierarchy(self):
"""Call ``GET /role_assignments``.
@@ -3028,6 +2161,154 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
inher_up_entity['scope']['project']['id'] = leaf_id
self.assertRoleAssignmentInListResponse(r, inher_up_entity)
+ def test_project_id_specified_if_include_subtree_specified(self):
+ """When using include_subtree, you must specify a project ID."""
+ self.get('/role_assignments?include_subtree=True',
+ expected_status=http_client.BAD_REQUEST)
+ self.get('/role_assignments?scope.project.id&'
+ 'include_subtree=True',
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_get_role_assignments_for_project_tree(self):
+ """Get ``role_assignments?scope.project.id=X&include_subtree``.
+
+ Test Plan:
+
+ - Create 2 roles and a hierarchy of projects with one root and one leaf
+ - Issue the URL to add a non-inherited user role to the root project
+ and the leaf project
+ - Issue the URL to get role assignments for the root project but
+ not the subtree - this should return just the root assignment
+ - Issue the URL to get role assignments for the root project and
+ its subtree - this should return both assignments
+ - Check that explicitly setting include_subtree to False is the
+ equivalent to not including it at all in the query.
+
+ """
+ # Create default scenario
+ root_id, leaf_id, non_inherited_role_id, unused_role_id = (
+ self._setup_hierarchical_projects_scenario())
+
+ # Grant non-inherited role to root and leaf projects
+ non_inher_entity_root = self.build_role_assignment_entity(
+ project_id=root_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.put(non_inher_entity_root['links']['assignment'])
+ non_inher_entity_leaf = self.build_role_assignment_entity(
+ project_id=leaf_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.put(non_inher_entity_leaf['links']['assignment'])
+
+ # Without the subtree, we should get the one assignment on the
+ # root project
+ collection_url = (
+ '/role_assignments?scope.project.id=%(project)s' % {
+ 'project': root_id})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r, resource_url=collection_url)
+
+ self.assertThat(r.result['role_assignments'], matchers.HasLength(1))
+ self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
+
+ # With the subtree, we should get both assignments
+ collection_url = (
+ '/role_assignments?scope.project.id=%(project)s'
+ '&include_subtree=True' % {
+ 'project': root_id})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r, resource_url=collection_url)
+
+ self.assertThat(r.result['role_assignments'], matchers.HasLength(2))
+ self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
+ self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf)
+
+ # With subtree=0, we should also only get the one assignment on the
+ # root project
+ collection_url = (
+ '/role_assignments?scope.project.id=%(project)s'
+ '&include_subtree=0' % {
+ 'project': root_id})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r, resource_url=collection_url)
+
+ self.assertThat(r.result['role_assignments'], matchers.HasLength(1))
+ self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
+
+ def test_get_effective_role_assignments_for_project_tree(self):
+ """Get ``role_assignments?scope.project.id=X&include_subtree=True&effective``.
+
+ Test Plan:
+
+ - Create 2 roles and a hierarchy of projects with one root and 4 levels
+ of child project
+ - Issue the URL to add a non-inherited user role to the root project
+ and a level 1 project
+ - Issue the URL to add an inherited user role on the level 2 project
+ - Issue the URL to get effective role assignments for the level 1
+ project and its subtree - this should return a role (non-inherited)
+ on the level 1 project and roles (inherited) on each of the level
+ 2, 3 and 4 projects
+
+ """
+ # Create default scenario
+ root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
+ self._setup_hierarchical_projects_scenario())
+
+ # Add some extra projects to the project hierarchy
+ level2 = unit.new_project_ref(domain_id=self.domain['id'],
+ parent_id=leaf_id)
+ level3 = unit.new_project_ref(domain_id=self.domain['id'],
+ parent_id=level2['id'])
+ level4 = unit.new_project_ref(domain_id=self.domain['id'],
+ parent_id=level3['id'])
+ self.resource_api.create_project(level2['id'], level2)
+ self.resource_api.create_project(level3['id'], level3)
+ self.resource_api.create_project(level4['id'], level4)
+
+ # Grant non-inherited role to root (as a spoiler) and to
+ # the level 1 (leaf) project
+ non_inher_entity_root = self.build_role_assignment_entity(
+ project_id=root_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.put(non_inher_entity_root['links']['assignment'])
+ non_inher_entity_leaf = self.build_role_assignment_entity(
+ project_id=leaf_id, user_id=self.user['id'],
+ role_id=non_inherited_role_id)
+ self.put(non_inher_entity_leaf['links']['assignment'])
+
+ # Grant inherited role to level 2
+ inher_entity = self.build_role_assignment_entity(
+ project_id=level2['id'], user_id=self.user['id'],
+ role_id=inherited_role_id, inherited_to_projects=True)
+ self.put(inher_entity['links']['assignment'])
+
+ # Get effective role assignments
+ collection_url = (
+ '/role_assignments?scope.project.id=%(project)s'
+ '&include_subtree=True&effective' % {
+ 'project': leaf_id})
+ r = self.get(collection_url)
+ self.assertValidRoleAssignmentListResponse(
+ r, resource_url=collection_url)
+
+ # There should be three assignments returned in total
+ self.assertThat(r.result['role_assignments'], matchers.HasLength(3))
+
+ # Assert that the user does not have the non-inherited role on root project
+ self.assertRoleAssignmentNotInListResponse(r, non_inher_entity_root)
+
+ # Assert that the user does have non-inherited role on leaf project
+ self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf)
+
+ # Assert that the user has inherited role on levels 3 and 4
+ inher_entity['scope']['project']['id'] = level3['id']
+ self.assertRoleAssignmentInListResponse(r, inher_entity)
+ inher_entity['scope']['project']['id'] = level4['id']
+ self.assertRoleAssignmentInListResponse(r, inher_entity)
+
def test_get_inherited_role_assignments_for_project_hierarchy(self):
"""Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
@@ -3089,7 +2370,7 @@ class AssignmentInheritanceDisabledTestCase(test_v3.RestfulTestCase):
self.config_fixture.config(group='os_inherit', enabled=False)
def test_crud_inherited_role_grants_failed_if_disabled(self):
- role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
base_collection_url = (
@@ -3107,118 +2388,484 @@ class AssignmentInheritanceDisabledTestCase(test_v3.RestfulTestCase):
self.delete(member_url, expected_status=http_client.NOT_FOUND)
-class AssignmentV3toV2MethodsTestCase(unit.TestCase):
- """Test domain V3 to V2 conversion methods."""
- def _setup_initial_projects(self):
- self.project_id = uuid.uuid4().hex
- self.domain_id = CONF.identity.default_domain_id
- self.parent_id = uuid.uuid4().hex
- # Project with only domain_id in ref
- self.project1 = {'id': self.project_id,
- 'name': self.project_id,
- 'domain_id': self.domain_id}
- # Project with both domain_id and parent_id in ref
- self.project2 = {'id': self.project_id,
- 'name': self.project_id,
- 'domain_id': self.domain_id,
- 'parent_id': self.parent_id}
- # Project with no domain_id and parent_id in ref
- self.project3 = {'id': self.project_id,
- 'name': self.project_id,
- 'domain_id': self.domain_id,
- 'parent_id': self.parent_id}
- # Expected result with no domain_id and parent_id
- self.expected_project = {'id': self.project_id,
- 'name': self.project_id}
-
- def test_v2controller_filter_domain_id(self):
- # V2.0 is not domain aware, ensure domain_id is popped off the ref.
- other_data = uuid.uuid4().hex
- domain_id = CONF.identity.default_domain_id
- ref = {'domain_id': domain_id,
- 'other_data': other_data}
-
- ref_no_domain = {'other_data': other_data}
- expected_ref = ref_no_domain.copy()
-
- updated_ref = controller.V2Controller.filter_domain_id(ref)
- self.assertIs(ref, updated_ref)
- self.assertDictEqual(ref, expected_ref)
- # Make sure we don't error/muck up data if domain_id isn't present
- updated_ref = controller.V2Controller.filter_domain_id(ref_no_domain)
- self.assertIs(ref_no_domain, updated_ref)
- self.assertDictEqual(ref_no_domain, expected_ref)
-
- def test_v3controller_filter_domain_id(self):
- # No data should be filtered out in this case.
- other_data = uuid.uuid4().hex
- domain_id = uuid.uuid4().hex
- ref = {'domain_id': domain_id,
- 'other_data': other_data}
-
- expected_ref = ref.copy()
- updated_ref = controller.V3Controller.filter_domain_id(ref)
- self.assertIs(ref, updated_ref)
- self.assertDictEqual(ref, expected_ref)
-
- def test_v2controller_filter_domain(self):
- other_data = uuid.uuid4().hex
- domain_id = uuid.uuid4().hex
- non_default_domain_ref = {'domain': {'id': domain_id},
- 'other_data': other_data}
- default_domain_ref = {'domain': {'id': 'default'},
- 'other_data': other_data}
- updated_ref = controller.V2Controller.filter_domain(default_domain_ref)
- self.assertNotIn('domain', updated_ref)
- self.assertRaises(exception.Unauthorized,
- controller.V2Controller.filter_domain,
- non_default_domain_ref)
-
- def test_v2controller_filter_project_parent_id(self):
- # V2.0 is not project hierarchy aware, ensure parent_id is popped off.
- other_data = uuid.uuid4().hex
- parent_id = uuid.uuid4().hex
- ref = {'parent_id': parent_id,
- 'other_data': other_data}
-
- ref_no_parent = {'other_data': other_data}
- expected_ref = ref_no_parent.copy()
-
- updated_ref = controller.V2Controller.filter_project_parent_id(ref)
- self.assertIs(ref, updated_ref)
- self.assertDictEqual(ref, expected_ref)
- # Make sure we don't error/muck up data if parent_id isn't present
- updated_ref = controller.V2Controller.filter_project_parent_id(
- ref_no_parent)
- self.assertIs(ref_no_parent, updated_ref)
- self.assertDictEqual(ref_no_parent, expected_ref)
-
- def test_v3_to_v2_project_method(self):
- self._setup_initial_projects()
- updated_project1 = controller.V2Controller.v3_to_v2_project(
- self.project1)
- self.assertIs(self.project1, updated_project1)
- self.assertDictEqual(self.project1, self.expected_project)
- updated_project2 = controller.V2Controller.v3_to_v2_project(
- self.project2)
- self.assertIs(self.project2, updated_project2)
- self.assertDictEqual(self.project2, self.expected_project)
- updated_project3 = controller.V2Controller.v3_to_v2_project(
- self.project3)
- self.assertIs(self.project3, updated_project3)
- self.assertDictEqual(self.project3, self.expected_project)
-
- def test_v3_to_v2_project_method_list(self):
- self._setup_initial_projects()
- project_list = [self.project1, self.project2, self.project3]
- updated_list = controller.V2Controller.v3_to_v2_project(project_list)
-
- self.assertEqual(len(updated_list), len(project_list))
-
- for i, ref in enumerate(updated_list):
- # Order should not change.
- self.assertIs(ref, project_list[i])
-
- self.assertDictEqual(self.project1, self.expected_project)
- self.assertDictEqual(self.project2, self.expected_project)
- self.assertDictEqual(self.project3, self.expected_project)
+class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin,
+ unit.TestCase):
+ def _create_role(self):
+ """Call ``POST /roles``."""
+ ref = unit.new_role_ref()
+ r = self.post('/roles', body={'role': ref})
+ return self.assertValidRoleResponse(r, ref)
+
+ def test_list_implied_roles_none(self):
+ self.prior = self._create_role()
+ url = '/roles/%s/implies' % (self.prior['id'])
+ response = self.get(url).json["role_inference"]
+ self.assertEqual(self.prior['id'], response['prior_role']['id'])
+ self.assertEqual(0, len(response['implies']))
+
+ def _create_implied_role(self, prior, implied):
+ self.put('/roles/%s/implies/%s' % (prior['id'], implied['id']),
+ expected_status=http_client.CREATED)
+
+ def _delete_implied_role(self, prior, implied):
+ self.delete('/roles/%s/implies/%s' % (prior['id'], implied['id']))
+
+ def _setup_prior_two_implied(self):
+ self.prior = self._create_role()
+ self.implied1 = self._create_role()
+ self._create_implied_role(self.prior, self.implied1)
+ self.implied2 = self._create_role()
+ self._create_implied_role(self.prior, self.implied2)
+
+ def _assert_expected_implied_role_response(
+ self, expected_prior_id, expected_implied_ids):
+ r = self.get('/roles/%s/implies' % expected_prior_id)
+ response = r.json["role_inference"]
+ self.assertEqual(expected_prior_id, response['prior_role']['id'])
+
+ actual_implied_ids = [implied['id'] for implied in response['implies']]
+
+ for expected_id in expected_implied_ids:
+ self.assertIn(expected_id, actual_implied_ids)
+ self.assertEqual(len(expected_implied_ids), len(response['implies']))
+
+ self.assertIsNotNone(response['prior_role']['links']['self'])
+ for implied in response['implies']:
+ self.assertIsNotNone(implied['links']['self'])
+
+ def _assert_two_roles_implied(self):
+ self._assert_expected_implied_role_response(
+ self.prior['id'], [self.implied1['id'], self.implied2['id']])
+
+ def _assert_one_role_implied(self):
+ self._assert_expected_implied_role_response(
+ self.prior['id'], [self.implied1['id']])
+
+ self.get('/roles/%s/implies/%s' %
+ (self.prior['id'], self.implied2['id']),
+ expected_status=http_client.NOT_FOUND)
+
+ def _assert_two_rules_defined(self):
+ r = self.get('/role_inferences/')
+
+ rules = r.result['role_inferences']
+
+ self.assertEqual(self.prior['id'], rules[0]['prior_role']['id'])
+ self.assertEqual(2, len(rules[0]['implies']))
+ implied_ids = [implied['id'] for implied in rules[0]['implies']]
+ implied_names = [implied['name'] for implied in rules[0]['implies']]
+
+ self.assertIn(self.implied1['id'], implied_ids)
+ self.assertIn(self.implied2['id'], implied_ids)
+ self.assertIn(self.implied1['name'], implied_names)
+ self.assertIn(self.implied2['name'], implied_names)
+
+ def _assert_one_rule_defined(self):
+ r = self.get('/role_inferences/')
+ rules = r.result['role_inferences']
+ self.assertEqual(self.prior['id'], rules[0]['prior_role']['id'])
+ self.assertEqual(self.implied1['id'], rules[0]['implies'][0]['id'])
+ self.assertEqual(self.implied1['name'], rules[0]['implies'][0]['name'])
+ self.assertEqual(1, len(rules[0]['implies']))
+
+ def test_list_all_rules(self):
+ self._setup_prior_two_implied()
+ self._assert_two_rules_defined()
+
+ self._delete_implied_role(self.prior, self.implied2)
+ self._assert_one_rule_defined()
+
+ def test_CRD_implied_roles(self):
+
+ self._setup_prior_two_implied()
+ self._assert_two_roles_implied()
+
+ self._delete_implied_role(self.prior, self.implied2)
+ self._assert_one_role_implied()
+
+ def _create_three_roles(self):
+ self.role_list = []
+ for _ in range(3):
+ role = unit.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+ self.role_list.append(role)
+
+ def _create_test_domain_user_project(self):
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+ user = unit.create_user(self.identity_api, domain_id=domain['id'])
+ project = unit.new_project_ref(domain_id=domain['id'])
+ self.resource_api.create_project(project['id'], project)
+ return domain, user, project
+
+ def _assign_top_role_to_user_on_project(self, user, project):
+ self.assignment_api.add_role_to_user_and_project(
+ user['id'], project['id'], self.role_list[0]['id'])
+
+ def _build_effective_role_assignments_url(self, user):
+ return '/role_assignments?effective&user.id=%(user_id)s' % {
+ 'user_id': user['id']}
+
+ def _assert_all_roles_in_assignment(self, response, user):
+ # Now use the list role assignments api to check that all three roles
+ # appear in the collection
+ self.assertValidRoleAssignmentListResponse(
+ response,
+ expected_length=len(self.role_list),
+ resource_url=self._build_effective_role_assignments_url(user))
+
+ def _assert_initial_assignment_in_effective(self, response, user, project):
+ # The initial assignment should be there (the link url will be
+ # generated and checked automatically since it matches the assignment)
+ entity = self.build_role_assignment_entity(
+ project_id=project['id'],
+ user_id=user['id'], role_id=self.role_list[0]['id'])
+ self.assertRoleAssignmentInListResponse(response, entity)
+
+ def _assert_effective_role_for_implied_has_prior_in_links(
+ self, response, user, project, prior_index, implied_index):
+ # An effective role for an implied role will have the prior role
+ # assignment in the links
+ prior_link = '/prior_roles/%(prior)s/implies/%(implied)s' % {
+ 'prior': self.role_list[prior_index]['id'],
+ 'implied': self.role_list[implied_index]['id']}
+ link = self.build_role_assignment_link(
+ project_id=project['id'], user_id=user['id'],
+ role_id=self.role_list[prior_index]['id'])
+ entity = self.build_role_assignment_entity(
+ link=link, project_id=project['id'],
+ user_id=user['id'], role_id=self.role_list[implied_index]['id'],
+ prior_link=prior_link)
+ self.assertRoleAssignmentInListResponse(response, entity)
+
+ def test_list_role_assignments_with_implied_roles(self):
+ """Call ``GET /role_assignments`` with implied role grant.
+
+ Test Plan:
+
+ - Create a domain with a user and a project
+ - Create 3 roles
+ - Role 0 implies role 1 and role 1 implies role 2
+ - Assign the top role to the project
+ - Issue the URL to check effective roles on project - this
+ should return all 3 roles.
+ - Check the links of the 3 roles indicate the prior role where
+ appropriate
+
+ """
+ (domain, user, project) = self._create_test_domain_user_project()
+ self._create_three_roles()
+ self._create_implied_role(self.role_list[0], self.role_list[1])
+ self._create_implied_role(self.role_list[1], self.role_list[2])
+ self._assign_top_role_to_user_on_project(user, project)
+
+ response = self.get(self._build_effective_role_assignments_url(user))
+ r = response
+
+ self._assert_all_roles_in_assignment(r, user)
+ self._assert_initial_assignment_in_effective(response, user, project)
+ self._assert_effective_role_for_implied_has_prior_in_links(
+ response, user, project, 0, 1)
+ self._assert_effective_role_for_implied_has_prior_in_links(
+ response, user, project, 1, 2)
+
+ def _create_named_role(self, name):
+ role = unit.new_role_ref()
+ role['name'] = name
+ self.role_api.create_role(role['id'], role)
+ return role
+
+ def test_root_role_as_implied_role_forbidden(self):
+ """Test root role is forbidden to be set as an implied role.
+
+ Create 2 roles that are prohibited from being an implied role.
+ Create 1 additional role which should be accepted as an implied
+ role. Assure the prohibited role names cannot be set as an implied
+ role. Assure the accepted role name which is not a member of the
+ prohibited implied role list can be successfully set an implied
+ role.
+ """
+ prohibited_name1 = 'root1'
+ prohibited_name2 = 'root2'
+ accepted_name1 = 'implied1'
+
+ prohibited_names = [prohibited_name1, prohibited_name2]
+ self.config_fixture.config(group='assignment',
+ prohibited_implied_role=prohibited_names)
+
+ prior_role = self._create_role()
+
+ prohibited_role1 = self._create_named_role(prohibited_name1)
+ url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
+ prior_role_id=prior_role['id'],
+ implied_role_id=prohibited_role1['id'])
+ self.put(url, expected_status=http_client.FORBIDDEN)
+
+ prohibited_role2 = self._create_named_role(prohibited_name2)
+ url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
+ prior_role_id=prior_role['id'],
+ implied_role_id=prohibited_role2['id'])
+ self.put(url, expected_status=http_client.FORBIDDEN)
+
+ accepted_role1 = self._create_named_role(accepted_name1)
+ url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
+ prior_role_id=prior_role['id'],
+ implied_role_id=accepted_role1['id'])
+ self.put(url, expected_status=http_client.CREATED)
+
+ def test_trusts_from_implied_role(self):
+ self._create_three_roles()
+ self._create_implied_role(self.role_list[0], self.role_list[1])
+ self._create_implied_role(self.role_list[1], self.role_list[2])
+ self._assign_top_role_to_user_on_project(self.user, self.project)
+
+ # Create a trustee and assign the prior role to her
+ trustee = unit.create_user(self.identity_api, domain_id=self.domain_id)
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user['id'],
+ trustee_user_id=trustee['id'],
+ project_id=self.project['id'],
+ role_ids=[self.role_list[0]['id']])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = r.result['trust']
+
+ # Only the role that was specified is in the trust, NOT implied roles
+ self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id'])
+ self.assertThat(trust['roles'], matchers.HasLength(1))
+
+ # Authenticate as the trustee
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password'],
+ trust_id=trust['id'])
+ r = self.v3_create_token(auth_data)
+ token = r.result['token']
+ self.assertThat(token['roles'],
+ matchers.HasLength(len(self.role_list)))
+ for role in token['roles']:
+ self.assertIn(role, self.role_list)
+ for role in self.role_list:
+ self.assertIn(role, token['roles'])
+
+ def test_trusts_from_domain_specific_implied_role(self):
+ self._create_three_roles()
+ # Overwrite the first role with a domain specific role
+ role = unit.new_role_ref(domain_id=self.domain_id)
+ self.role_list[0] = self.role_api.create_role(role['id'], role)
+ self._create_implied_role(self.role_list[0], self.role_list[1])
+ self._create_implied_role(self.role_list[1], self.role_list[2])
+ self._assign_top_role_to_user_on_project(self.user, self.project)
+
+ # Create a trustee and assign the prior role to her
+ trustee = unit.create_user(self.identity_api, domain_id=self.domain_id)
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user['id'],
+ trustee_user_id=trustee['id'],
+ project_id=self.project['id'],
+ role_ids=[self.role_list[0]['id']])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = r.result['trust']
+
+ # Only the role that was specified is in the trust, NOT implied roles
+ self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id'])
+ self.assertThat(trust['roles'], matchers.HasLength(1))
+
+ # Authenticate as the trustee
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password'],
+ trust_id=trust['id'])
+ r = self.v3_create_token(auth_data)
+ token = r.result['token']
+
+ # The token should have the roles implied by the domain specific role,
+ # but not the domain specific role itself.
+ self.assertThat(token['roles'],
+ matchers.HasLength(len(self.role_list) - 1))
+ for role in token['roles']:
+ self.assertIn(role, self.role_list)
+ for role in [self.role_list[1], self.role_list[2]]:
+ self.assertIn(role, token['roles'])
+ self.assertNotIn(self.role_list[0], token['roles'])
+
+
+class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase):
+ def setUp(self):
+ def create_role(domain_id=None):
+ """Call ``POST /roles``."""
+ ref = unit.new_role_ref(domain_id=domain_id)
+ r = self.post(
+ '/roles',
+ body={'role': ref})
+ return self.assertValidRoleResponse(r, ref)
+
+ super(DomainSpecificRoleTests, self).setUp()
+ self.domainA = unit.new_domain_ref()
+ self.resource_api.create_domain(self.domainA['id'], self.domainA)
+ self.domainB = unit.new_domain_ref()
+ self.resource_api.create_domain(self.domainB['id'], self.domainB)
+
+ self.global_role1 = create_role()
+ self.global_role2 = create_role()
+ # Since there may be other global roles already created, let's count
+ # them, so we can ensure we can check subsequent list responses
+ # are correct
+ r = self.get('/roles')
+ self.existing_global_roles = len(r.result['roles'])
+
+ # And now create some domain specific roles
+ self.domainA_role1 = create_role(domain_id=self.domainA['id'])
+ self.domainA_role2 = create_role(domain_id=self.domainA['id'])
+ self.domainB_role = create_role(domain_id=self.domainB['id'])
+
+ def test_get_and_list_domain_specific_roles(self):
+ # Check we can get a domain specific role
+ r = self.get('/roles/%s' % self.domainA_role1['id'])
+ self.assertValidRoleResponse(r, self.domainA_role1)
+
+ # If we list without specifying a domain, we should only get global
+ # roles back.
+ r = self.get('/roles')
+ self.assertValidRoleListResponse(
+ r, expected_length=self.existing_global_roles)
+ self.assertRoleInListResponse(r, self.global_role1)
+ self.assertRoleInListResponse(r, self.global_role2)
+ self.assertRoleNotInListResponse(r, self.domainA_role1)
+ self.assertRoleNotInListResponse(r, self.domainA_role2)
+ self.assertRoleNotInListResponse(r, self.domainB_role)
+
+ # Now list those in domainA, making sure that's all we get back
+ r = self.get('/roles?domain_id=%s' % self.domainA['id'])
+ self.assertValidRoleListResponse(r, expected_length=2)
+ self.assertRoleInListResponse(r, self.domainA_role1)
+ self.assertRoleInListResponse(r, self.domainA_role2)
+
+ def test_update_domain_specific_roles(self):
+ self.domainA_role1['name'] = uuid.uuid4().hex
+ self.patch('/roles/%(role_id)s' % {
+ 'role_id': self.domainA_role1['id']},
+ body={'role': self.domainA_role1})
+ r = self.get('/roles/%s' % self.domainA_role1['id'])
+ self.assertValidRoleResponse(r, self.domainA_role1)
+
+ def test_delete_domain_specific_roles(self):
+ # Check delete only removes that one domain role
+ self.delete('/roles/%(role_id)s' % {
+ 'role_id': self.domainA_role1['id']})
+
+ self.get('/roles/%s' % self.domainA_role1['id'],
+ expected_status=http_client.NOT_FOUND)
+ # Now re-list those in domainA, making sure there's only one left
+ r = self.get('/roles?domain_id=%s' % self.domainA['id'])
+ self.assertValidRoleListResponse(r, expected_length=1)
+ self.assertRoleInListResponse(r, self.domainA_role2)
+
+
+class ListUserProjectsTestCase(test_v3.RestfulTestCase):
+ """Tests for /users/<user>/projects"""
+
+ def load_sample_data(self):
+ # do not load base class's data, keep it focused on the tests
+
+ self.auths = []
+ self.domains = []
+ self.projects = []
+ self.roles = []
+ self.users = []
+
+ # Create 3 sets of domain, roles, projects, and users to demonstrate
+ # the right user's data is loaded and only projects they can access
+ # are returned.
+
+ for _ in range(3):
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+
+ user = unit.create_user(self.identity_api, domain_id=domain['id'])
+
+ role = unit.new_role_ref()
+ self.role_api.create_role(role['id'], role)
+
+ self.assignment_api.create_grant(role['id'],
+ user_id=user['id'],
+ domain_id=domain['id'])
+
+ project = unit.new_project_ref(domain_id=domain['id'])
+ self.resource_api.create_project(project['id'], project)
+
+ self.assignment_api.create_grant(role['id'],
+ user_id=user['id'],
+ project_id=project['id'])
+
+ auth = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password'],
+ domain_id=domain['id'])
+
+ self.auths.append(auth)
+ self.domains.append(domain)
+ self.projects.append(project)
+ self.roles.append(role)
+ self.users.append(user)
+
+ def test_list_all(self):
+ for i in range(len(self.users)):
+ user = self.users[i]
+ auth = self.auths[i]
+
+ url = '/users/%s/projects' % user['id']
+ result = self.get(url, auth=auth)
+ projects_result = result.json['projects']
+ self.assertEqual(1, len(projects_result))
+ self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
+
+ def test_list_enabled(self):
+ for i in range(len(self.users)):
+ user = self.users[i]
+ auth = self.auths[i]
+
+ # There are no disabled projects
+ url = '/users/%s/projects?enabled=True' % user['id']
+ result = self.get(url, auth=auth)
+ projects_result = result.json['projects']
+ self.assertEqual(1, len(projects_result))
+ self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
+
+ def test_list_disabled(self):
+ for i in range(len(self.users)):
+ user = self.users[i]
+ auth = self.auths[i]
+ project = self.projects[i]
+
+ # There are no disabled projects
+ url = '/users/%s/projects?enabled=False' % user['id']
+ result = self.get(url, auth=auth)
+ self.assertEqual(0, len(result.json['projects']))
+
+ # disable this one and check again
+ project['enabled'] = False
+ self.resource_api.update_project(project['id'], project)
+ result = self.get(url, auth=auth)
+ projects_result = result.json['projects']
+ self.assertEqual(1, len(projects_result))
+ self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
+
+ def test_list_by_domain_id(self):
+ for i in range(len(self.users)):
+ user = self.users[i]
+ domain = self.domains[i]
+ auth = self.auths[i]
+
+ # Try looking for projects with a non-existent domain_id
+ url = '/users/%s/projects?domain_id=%s' % (user['id'],
+ uuid.uuid4().hex)
+ result = self.get(url, auth=auth)
+ self.assertEqual(0, len(result.json['projects']))
+
+ # Now try a valid one
+ url = '/users/%s/projects?domain_id=%s' % (user['id'],
+ domain['id'])
+ result = self.get(url, auth=auth)
+ projects_result = result.json['projects']
+ self.assertEqual(1, len(projects_result))
+ self.assertEqual(self.projects[i]['id'], projects_result[0]['id'])
diff --git a/keystone-moon/keystone/tests/unit/test_v3_auth.py b/keystone-moon/keystone/tests/unit/test_v3_auth.py
index d53a85df..698feeb8 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_auth.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_auth.py
@@ -14,6 +14,7 @@
import copy
import datetime
+import itertools
import json
import operator
import uuid
@@ -21,6 +22,8 @@ import uuid
from keystoneclient.common import cms
import mock
from oslo_config import cfg
+from oslo_log import versionutils
+from oslo_utils import fixture
from oslo_utils import timeutils
from six.moves import http_client
from six.moves import range
@@ -28,9 +31,12 @@ from testtools import matchers
from testtools import testcase
from keystone import auth
+from keystone.auth.plugins import totp
from keystone.common import utils
+from keystone.contrib.revoke import routers
from keystone import exception
from keystone.policy.backends import rules
+from keystone.tests.common import auth as common_auth
from keystone.tests import unit
from keystone.tests.unit import ksfixtures
from keystone.tests.unit import test_v3
@@ -38,7 +44,7 @@ from keystone.tests.unit import test_v3
CONF = cfg.CONF
-class TestAuthInfo(test_v3.AuthTestMixin, testcase.TestCase):
+class TestAuthInfo(common_auth.AuthTestMixin, testcase.TestCase):
def setUp(self):
super(TestAuthInfo, self).setUp()
auth.controllers.load_auth_methods()
@@ -121,7 +127,7 @@ class TokenAPITests(object):
# resolved in Python for multiple inheritance means that a setUp in this
# would get skipped by the testrunner.
def doSetUp(self):
- r = self.v3_authenticate_token(self.build_authentication_request(
+ r = self.v3_create_token(self.build_authentication_request(
username=self.user['name'],
user_domain_id=self.domain_id,
password=self.user['password']))
@@ -129,43 +135,372 @@ class TokenAPITests(object):
self.v3_token = r.headers.get('X-Subject-Token')
self.headers = {'X-Subject-Token': r.headers.get('X-Subject-Token')}
- def test_default_fixture_scope_token(self):
- self.assertIsNotNone(self.get_scoped_token())
+ def _make_auth_request(self, auth_data):
+ resp = self.post('/auth/tokens', body=auth_data)
+ token = resp.headers.get('X-Subject-Token')
+ return token
- def test_v3_v2_intermix_non_default_domain_failed(self):
- v3_token = self.get_requested_token(self.build_authentication_request(
+ def _get_unscoped_token(self):
+ auth_data = self.build_authentication_request(
user_id=self.user['id'],
- password=self.user['password']))
+ password=self.user['password'])
+ return self._make_auth_request(auth_data)
- # now validate the v3 token with v2 API
- self.admin_request(
- path='/v2.0/tokens/%s' % v3_token,
- token=CONF.admin_token,
- method='GET',
- expected_status=http_client.UNAUTHORIZED)
+ def _get_domain_scoped_token(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain_id)
+ return self._make_auth_request(auth_data)
+
+ def _get_project_scoped_token(self):
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project_id)
+ return self._make_auth_request(auth_data)
+
+ def _get_trust_scoped_token(self, trustee_user, trust):
+ auth_data = self.build_authentication_request(
+ user_id=trustee_user['id'],
+ password=trustee_user['password'],
+ trust_id=trust['id'])
+ return self._make_auth_request(auth_data)
+
+ def _create_trust(self, impersonation=False):
+ # Create a trustee user
+ trustee_user = unit.create_user(self.identity_api,
+ domain_id=self.domain_id)
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=trustee_user['id'],
+ project_id=self.project_id,
+ impersonation=impersonation,
+ role_ids=[self.role_id])
+
+ # Create a trust
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+ return (trustee_user, trust)
+
+ def _validate_token(self, token, expected_status=http_client.OK):
+ return self.get(
+ '/auth/tokens',
+ headers={'X-Subject-Token': token},
+ expected_status=expected_status)
+
+ def _revoke_token(self, token, expected_status=http_client.NO_CONTENT):
+ return self.delete(
+ '/auth/tokens',
+ headers={'x-subject-token': token},
+ expected_status=expected_status)
+
+ def _set_user_enabled(self, user, enabled=True):
+ user['enabled'] = enabled
+ self.identity_api.update_user(user['id'], user)
+
+ def test_validate_unscoped_token(self):
+ unscoped_token = self._get_unscoped_token()
+ self._validate_token(unscoped_token)
+
+ def test_revoke_unscoped_token(self):
+ unscoped_token = self._get_unscoped_token()
+ self._validate_token(unscoped_token)
+ self._revoke_token(unscoped_token)
+ self._validate_token(unscoped_token,
+ expected_status=http_client.NOT_FOUND)
+
+ def test_unscoped_token_is_invalid_after_disabling_user(self):
+ unscoped_token = self._get_unscoped_token()
+ # Make sure the token is valid
+ self._validate_token(unscoped_token)
+ # Disable the user
+ self._set_user_enabled(self.user, enabled=False)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+
+ def test_unscoped_token_is_invalid_after_enabling_disabled_user(self):
+ unscoped_token = self._get_unscoped_token()
+ # Make sure the token is valid
+ self._validate_token(unscoped_token)
+ # Disable the user
+ self._set_user_enabled(self.user, enabled=False)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+ # Enable the user
+ self._set_user_enabled(self.user)
+ # Ensure validating a token for a re-enabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+
+ def test_unscoped_token_is_invalid_after_disabling_user_domain(self):
+ unscoped_token = self._get_unscoped_token()
+ # Make sure the token is valid
+ self._validate_token(unscoped_token)
+ # Disable the user's domain
+ self.domain['enabled'] = False
+ self.resource_api.update_domain(self.domain['id'], self.domain)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+
+ def test_unscoped_token_is_invalid_after_changing_user_password(self):
+ unscoped_token = self._get_unscoped_token()
+ # Make sure the token is valid
+ self._validate_token(unscoped_token)
+ # Change user's password
+ self.user['password'] = 'Password1'
+ self.identity_api.update_user(self.user['id'], self.user)
+ # Ensure updating user's password revokes existing user's tokens
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ unscoped_token)
+
+ def test_validate_domain_scoped_token(self):
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+ domain_scoped_token = self._get_domain_scoped_token()
+ resp = self._validate_token(domain_scoped_token)
+ resp_json = json.loads(resp.body)
+ self.assertIsNotNone(resp_json['token']['catalog'])
+ self.assertIsNotNone(resp_json['token']['roles'])
+ self.assertIsNotNone(resp_json['token']['domain'])
+
+ def test_domain_scoped_token_is_invalid_after_disabling_user(self):
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+ domain_scoped_token = self._get_domain_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(domain_scoped_token)
+ # Disable user
+ self._set_user_enabled(self.user, enabled=False)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ domain_scoped_token)
+
+ def test_domain_scoped_token_is_invalid_after_deleting_grant(self):
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+ domain_scoped_token = self._get_domain_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(domain_scoped_token)
+ # Delete access to domain
+ self.assignment_api.delete_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ domain_scoped_token)
+
+ def test_domain_scoped_token_invalid_after_disabling_domain(self):
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+ domain_scoped_token = self._get_domain_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(domain_scoped_token)
+ # Disable domain
+ self.domain['enabled'] = False
+ self.resource_api.update_domain(self.domain['id'], self.domain)
+ # Ensure validating a token for a disabled domain fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ domain_scoped_token)
+
+ def test_v2_validate_domain_scoped_token_returns_unauthorized(self):
+ # Test that validating a domain scoped token in v2.0 returns
+ # unauthorized.
+ # Grant user access to domain
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+
+ scoped_token = self._get_domain_scoped_token()
+ self.assertRaises(exception.Unauthorized,
+ self.token_provider_api.validate_v2_token,
+ scoped_token)
+
+ def test_validate_project_scoped_token(self):
+ project_scoped_token = self._get_project_scoped_token()
+ self._validate_token(project_scoped_token)
+
+ def test_revoke_project_scoped_token(self):
+ project_scoped_token = self._get_project_scoped_token()
+ self._validate_token(project_scoped_token)
+ self._revoke_token(project_scoped_token)
+ self._validate_token(project_scoped_token,
+ expected_status=http_client.NOT_FOUND)
+
+ def test_project_scoped_token_is_invalid_after_disabling_user(self):
+ project_scoped_token = self._get_project_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(project_scoped_token)
+ # Disable the user
+ self._set_user_enabled(self.user, enabled=False)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ project_scoped_token)
+
+ def test_project_scoped_token_invalid_after_changing_user_password(self):
+ project_scoped_token = self._get_project_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(project_scoped_token)
+ # Update user's password
+ self.user['password'] = 'Password1'
+ self.identity_api.update_user(self.user['id'], self.user)
+ # Ensure updating user's password revokes existing tokens
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ project_scoped_token)
+
+ def test_project_scoped_token_invalid_after_disabling_project(self):
+ project_scoped_token = self._get_project_scoped_token()
+ # Make sure the token is valid
+ self._validate_token(project_scoped_token)
+ # Disable project
+ self.project['enabled'] = False
+ self.resource_api.update_project(self.project['id'], self.project)
+ # Ensure validating a token for a disabled project fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ project_scoped_token)
+
+ def test_rescope_unscoped_token_with_trust(self):
+ trustee_user, trust = self._create_trust()
+ self._get_trust_scoped_token(trustee_user, trust)
+
+ def test_validate_a_trust_scoped_token(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ def test_validate_a_trust_scoped_token_impersonated(self):
+ trustee_user, trust = self._create_trust(impersonation=True)
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ def test_revoke_trust_scoped_token(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+ self._revoke_token(trust_scoped_token)
+ self._validate_token(trust_scoped_token,
+ expected_status=http_client.NOT_FOUND)
+
+ def test_trust_scoped_token_is_invalid_after_disabling_trustee(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ # Disable trustee
+ trustee_update_ref = dict(enabled=False)
+ self.identity_api.update_user(trustee_user['id'], trustee_update_ref)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_trust_scoped_token_invalid_after_changing_trustee_password(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+ # Change trustee's password
+ trustee_update_ref = dict(password='Password1')
+ self.identity_api.update_user(trustee_user['id'], trustee_update_ref)
+ # Ensure updating trustee's password revokes existing tokens
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_trust_scoped_token_is_invalid_after_disabling_trustor(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ # Disable the trustor
+ trustor_update_ref = dict(enabled=False)
+ self.identity_api.update_user(self.user['id'], trustor_update_ref)
+ # Ensure validating a token for a disabled user fails
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_trust_scoped_token_invalid_after_changing_trustor_password(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ # Change trustor's password
+ trustor_update_ref = dict(password='Password1')
+ self.identity_api.update_user(self.user['id'], trustor_update_ref)
+ # Ensure updating trustor's password revokes existing user's tokens
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_trust_scoped_token_invalid_after_disabled_trustor_domain(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Validate a trust scoped token
+ self._validate_token(trust_scoped_token)
+
+ # Disable trustor's domain
+ self.domain['enabled'] = False
+ self.resource_api.update_domain(self.domain['id'], self.domain)
+
+ trustor_update_ref = dict(password='Password1')
+ self.identity_api.update_user(self.user['id'], trustor_update_ref)
+ # Ensure updating trustor's password revokes existing user's tokens
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api.validate_token,
+ trust_scoped_token)
+
+ def test_v2_validate_trust_scoped_token(self):
+ # Test that validating a trust scoped token in v2.0 returns
+ # unauthorized.
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ self.assertRaises(exception.Unauthorized,
+ self.token_provider_api.validate_v2_token,
+ trust_scoped_token)
+
+ def test_default_fixture_scope_token(self):
+ self.assertIsNotNone(self.get_scoped_token())
def test_v3_v2_intermix_new_default_domain(self):
# If the default_domain_id config option is changed, then should be
# able to validate a v3 token with user in the new domain.
# 1) Create a new domain for the user.
- new_domain = {
- 'description': uuid.uuid4().hex,
- 'enabled': True,
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- }
+ new_domain = unit.new_domain_ref()
self.resource_api.create_domain(new_domain['id'], new_domain)
# 2) Create user in new domain.
- new_user_password = uuid.uuid4().hex
- new_user = {
- 'name': uuid.uuid4().hex,
- 'domain_id': new_domain['id'],
- 'password': new_user_password,
- 'email': uuid.uuid4().hex,
- }
- new_user = self.identity_api.create_user(new_user)
+ new_user = unit.create_user(self.identity_api,
+ domain_id=new_domain['id'])
# 3) Update the default_domain_id config option to the new domain
self.config_fixture.config(
@@ -175,12 +510,12 @@ class TokenAPITests(object):
# 4) Get a token using v3 API.
v3_token = self.get_requested_token(self.build_authentication_request(
user_id=new_user['id'],
- password=new_user_password))
+ password=new_user['password']))
# 5) Validate token using v2 API.
self.admin_request(
path='/v2.0/tokens/%s' % v3_token,
- token=CONF.admin_token,
+ token=self.get_admin_token(),
method='GET')
def test_v3_v2_intermix_domain_scoped_token_failed(self):
@@ -199,10 +534,10 @@ class TokenAPITests(object):
self.admin_request(
method='GET',
path='/v2.0/tokens/%s' % v3_token,
- token=CONF.admin_token,
+ token=self.get_admin_token(),
expected_status=http_client.UNAUTHORIZED)
- def test_v3_v2_intermix_non_default_project_failed(self):
+ def test_v3_v2_intermix_non_default_project_succeed(self):
# self.project is in a non-default domain
v3_token = self.get_requested_token(self.build_authentication_request(
user_id=self.default_domain_user['id'],
@@ -213,10 +548,9 @@ class TokenAPITests(object):
self.admin_request(
method='GET',
path='/v2.0/tokens/%s' % v3_token,
- token=CONF.admin_token,
- expected_status=http_client.UNAUTHORIZED)
+ token=self.get_admin_token())
- def test_v3_v2_intermix_non_default_user_failed(self):
+ def test_v3_v2_intermix_non_default_user_succeed(self):
self.assignment_api.create_grant(
self.role['id'],
user_id=self.user['id'],
@@ -232,8 +566,7 @@ class TokenAPITests(object):
self.admin_request(
method='GET',
path='/v2.0/tokens/%s' % v3_token,
- token=CONF.admin_token,
- expected_status=http_client.UNAUTHORIZED)
+ token=self.get_admin_token())
def test_v3_v2_intermix_domain_scope_failed(self):
self.assignment_api.create_grant(
@@ -249,12 +582,12 @@ class TokenAPITests(object):
# v2 cannot reference projects outside the default domain
self.admin_request(
path='/v2.0/tokens/%s' % v3_token,
- token=CONF.admin_token,
+ token=self.get_admin_token(),
method='GET',
expected_status=http_client.UNAUTHORIZED)
def test_v3_v2_unscoped_token_intermix(self):
- r = self.v3_authenticate_token(self.build_authentication_request(
+ r = self.v3_create_token(self.build_authentication_request(
user_id=self.default_domain_user['id'],
password=self.default_domain_user['password']))
self.assertValidUnscopedTokenResponse(r)
@@ -264,7 +597,7 @@ class TokenAPITests(object):
# now validate the v3 token with v2 API
r = self.admin_request(
path='/v2.0/tokens/%s' % v3_token,
- token=CONF.admin_token,
+ token=self.get_admin_token(),
method='GET')
v2_token_data = r.result
@@ -278,7 +611,7 @@ class TokenAPITests(object):
def test_v3_v2_token_intermix(self):
# FIXME(gyee): PKI tokens are not interchangeable because token
# data is baked into the token itself.
- r = self.v3_authenticate_token(self.build_authentication_request(
+ r = self.v3_create_token(self.build_authentication_request(
user_id=self.default_domain_user['id'],
password=self.default_domain_user['password'],
project_id=self.default_domain_project['id']))
@@ -290,7 +623,7 @@ class TokenAPITests(object):
r = self.admin_request(
method='GET',
path='/v2.0/tokens/%s' % v3_token,
- token=CONF.admin_token)
+ token=self.get_admin_token())
v2_token_data = r.result
self.assertEqual(v2_token_data['access']['user']['id'],
@@ -318,9 +651,7 @@ class TokenAPITests(object):
v2_token = v2_token_data['access']['token']['id']
r = self.get('/auth/tokens', headers={'X-Subject-Token': v2_token})
- # FIXME(dolph): Due to bug 1476329, v2 tokens validated on v3 are
- # missing timezones, so they will not pass this assertion.
- # self.assertValidUnscopedTokenResponse(r)
+ self.assertValidUnscopedTokenResponse(r)
v3_token_data = r.result
self.assertEqual(v2_token_data['access']['user']['id'],
@@ -347,9 +678,7 @@ class TokenAPITests(object):
v2_token = v2_token_data['access']['token']['id']
r = self.get('/auth/tokens', headers={'X-Subject-Token': v2_token})
- # FIXME(dolph): Due to bug 1476329, v2 tokens validated on v3 are
- # missing timezones, so they will not pass this assertion.
- # self.assertValidProjectScopedTokenResponse(r)
+ self.assertValidProjectScopedTokenResponse(r)
v3_token_data = r.result
self.assertEqual(v2_token_data['access']['user']['id'],
@@ -384,9 +713,8 @@ class TokenAPITests(object):
v2_token = r.result['access']['token']['id']
# Delete the v2 token using v3.
- resp = self.delete(
+ self.delete(
'/auth/tokens', headers={'X-Subject-Token': v2_token})
- self.assertEqual(resp.status_code, 204)
# Attempting to use the deleted token on v2 should fail.
self.admin_request(
@@ -397,7 +725,7 @@ class TokenAPITests(object):
expires = self.v3_token_data['token']['expires_at']
# rescope the token
- r = self.v3_authenticate_token(self.build_authentication_request(
+ r = self.v3_create_token(self.build_authentication_request(
token=self.v3_token,
project_id=self.project_id))
self.assertValidProjectScopedTokenResponse(r)
@@ -406,12 +734,24 @@ class TokenAPITests(object):
self.assertEqual(expires, r.result['token']['expires_at'])
def test_check_token(self):
- self.head('/auth/tokens', headers=self.headers, expected_status=200)
+ self.head('/auth/tokens', headers=self.headers,
+ expected_status=http_client.OK)
def test_validate_token(self):
r = self.get('/auth/tokens', headers=self.headers)
self.assertValidUnscopedTokenResponse(r)
+ def test_validate_missing_subject_token(self):
+ self.get('/auth/tokens',
+ expected_status=http_client.NOT_FOUND)
+
+ def test_validate_missing_auth_token(self):
+ self.admin_request(
+ method='GET',
+ path='/v3/projects',
+ token=None,
+ expected_status=http_client.UNAUTHORIZED)
+
def test_validate_token_nocatalog(self):
v3_token = self.get_requested_token(self.build_authentication_request(
user_id=self.user['id'],
@@ -422,6 +762,399 @@ class TokenAPITests(object):
headers={'X-Subject-Token': v3_token})
self.assertValidProjectScopedTokenResponse(r, require_catalog=False)
+ def test_is_admin_token_by_ids(self):
+ self.config_fixture.config(
+ group='resource',
+ admin_project_domain_name=self.domain['name'],
+ admin_project_name=self.project['name'])
+ r = self.v3_create_token(self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id']))
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=True)
+ v3_token = r.headers.get('X-Subject-Token')
+ r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=True)
+
+ def test_is_admin_token_by_names(self):
+ self.config_fixture.config(
+ group='resource',
+ admin_project_domain_name=self.domain['name'],
+ admin_project_name=self.project['name'])
+ r = self.v3_create_token(self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_domain_name=self.domain['name'],
+ project_name=self.project['name']))
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=True)
+ v3_token = r.headers.get('X-Subject-Token')
+ r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=True)
+
+ def test_token_for_non_admin_project_is_not_admin(self):
+ self.config_fixture.config(
+ group='resource',
+ admin_project_domain_name=self.domain['name'],
+ admin_project_name=uuid.uuid4().hex)
+ r = self.v3_create_token(self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id']))
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
+ v3_token = r.headers.get('X-Subject-Token')
+ r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
+
+ def test_token_for_non_admin_domain_same_project_name_is_not_admin(self):
+ self.config_fixture.config(
+ group='resource',
+ admin_project_domain_name=uuid.uuid4().hex,
+ admin_project_name=self.project['name'])
+ r = self.v3_create_token(self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id']))
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
+ v3_token = r.headers.get('X-Subject-Token')
+ r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
+
+ def test_only_admin_project_set_acts_as_non_admin(self):
+ self.config_fixture.config(
+ group='resource',
+ admin_project_name=self.project['name'])
+ r = self.v3_create_token(self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id']))
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
+ v3_token = r.headers.get('X-Subject-Token')
+ r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
+ self.assertValidProjectScopedTokenResponse(r, is_admin_project=False)
+
+ def _create_role(self, domain_id=None):
+ """Call ``POST /roles``."""
+ ref = unit.new_role_ref(domain_id=domain_id)
+ r = self.post('/roles', body={'role': ref})
+ return self.assertValidRoleResponse(r, ref)
+
+ def _create_implied_role(self, prior_id):
+ implied = self._create_role()
+ url = '/roles/%s/implies/%s' % (prior_id, implied['id'])
+ self.put(url, expected_status=http_client.CREATED)
+ return implied
+
+ def _delete_implied_role(self, prior_role_id, implied_role_id):
+ url = '/roles/%s/implies/%s' % (prior_role_id, implied_role_id)
+ self.delete(url)
+
+ def _get_scoped_token_roles(self, is_domain=False):
+ if is_domain:
+ v3_token = self.get_domain_scoped_token()
+ else:
+ v3_token = self.get_scoped_token()
+
+ r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token})
+ v3_token_data = r.result
+ token_roles = v3_token_data['token']['roles']
+ return token_roles
+
+ def _create_implied_role_shows_in_v3_token(self, is_domain):
+ token_roles = self._get_scoped_token_roles(is_domain)
+ self.assertEqual(1, len(token_roles))
+
+ prior = token_roles[0]['id']
+ implied1 = self._create_implied_role(prior)
+
+ token_roles = self._get_scoped_token_roles(is_domain)
+ self.assertEqual(2, len(token_roles))
+
+ implied2 = self._create_implied_role(prior)
+ token_roles = self._get_scoped_token_roles(is_domain)
+ self.assertEqual(3, len(token_roles))
+
+ token_role_ids = [role['id'] for role in token_roles]
+ self.assertIn(prior, token_role_ids)
+ self.assertIn(implied1['id'], token_role_ids)
+ self.assertIn(implied2['id'], token_role_ids)
+
+ def test_create_implied_role_shows_in_v3_project_token(self):
+ # regardless of the default chosen, this should always
+ # test with the option set.
+ self.config_fixture.config(group='token', infer_roles=True)
+ self._create_implied_role_shows_in_v3_token(False)
+
+ def test_create_implied_role_shows_in_v3_domain_token(self):
+ self.config_fixture.config(group='token', infer_roles=True)
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.user['id'],
+ domain_id=self.domain['id'])
+
+ self._create_implied_role_shows_in_v3_token(True)
+
+ def test_group_assigned_implied_role_shows_in_v3_token(self):
+ self.config_fixture.config(group='token', infer_roles=True)
+ is_domain = False
+ token_roles = self._get_scoped_token_roles(is_domain)
+ self.assertEqual(1, len(token_roles))
+
+ new_role = self._create_role()
+ prior = new_role['id']
+
+ new_group_ref = unit.new_group_ref(domain_id=self.domain['id'])
+ new_group = self.identity_api.create_group(new_group_ref)
+ self.assignment_api.create_grant(prior,
+ group_id=new_group['id'],
+ project_id=self.project['id'])
+
+ token_roles = self._get_scoped_token_roles(is_domain)
+ self.assertEqual(1, len(token_roles))
+
+ self.identity_api.add_user_to_group(self.user['id'],
+ new_group['id'])
+
+ token_roles = self._get_scoped_token_roles(is_domain)
+ self.assertEqual(2, len(token_roles))
+
+ implied1 = self._create_implied_role(prior)
+
+ token_roles = self._get_scoped_token_roles(is_domain)
+ self.assertEqual(3, len(token_roles))
+
+ implied2 = self._create_implied_role(prior)
+ token_roles = self._get_scoped_token_roles(is_domain)
+ self.assertEqual(4, len(token_roles))
+
+ token_role_ids = [role['id'] for role in token_roles]
+ self.assertIn(prior, token_role_ids)
+ self.assertIn(implied1['id'], token_role_ids)
+ self.assertIn(implied2['id'], token_role_ids)
+
+ def test_multiple_implied_roles_show_in_v3_token(self):
+ self.config_fixture.config(group='token', infer_roles=True)
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(1, len(token_roles))
+
+ prior = token_roles[0]['id']
+ implied1 = self._create_implied_role(prior)
+ implied2 = self._create_implied_role(prior)
+ implied3 = self._create_implied_role(prior)
+
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(4, len(token_roles))
+
+ token_role_ids = [role['id'] for role in token_roles]
+ self.assertIn(prior, token_role_ids)
+ self.assertIn(implied1['id'], token_role_ids)
+ self.assertIn(implied2['id'], token_role_ids)
+ self.assertIn(implied3['id'], token_role_ids)
+
+ def test_chained_implied_role_shows_in_v3_token(self):
+ self.config_fixture.config(group='token', infer_roles=True)
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(1, len(token_roles))
+
+ prior = token_roles[0]['id']
+ implied1 = self._create_implied_role(prior)
+ implied2 = self._create_implied_role(implied1['id'])
+ implied3 = self._create_implied_role(implied2['id'])
+
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(4, len(token_roles))
+
+ token_role_ids = [role['id'] for role in token_roles]
+
+ self.assertIn(prior, token_role_ids)
+ self.assertIn(implied1['id'], token_role_ids)
+ self.assertIn(implied2['id'], token_role_ids)
+ self.assertIn(implied3['id'], token_role_ids)
+
+ def test_implied_role_disabled_by_config(self):
+ self.config_fixture.config(group='token', infer_roles=False)
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(1, len(token_roles))
+
+ prior = token_roles[0]['id']
+ implied1 = self._create_implied_role(prior)
+ implied2 = self._create_implied_role(implied1['id'])
+ self._create_implied_role(implied2['id'])
+
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(1, len(token_roles))
+ token_role_ids = [role['id'] for role in token_roles]
+ self.assertIn(prior, token_role_ids)
+
+ def test_delete_implied_role_do_not_show_in_v3_token(self):
+ self.config_fixture.config(group='token', infer_roles=True)
+ token_roles = self._get_scoped_token_roles()
+ prior = token_roles[0]['id']
+ implied = self._create_implied_role(prior)
+
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(2, len(token_roles))
+ self._delete_implied_role(prior, implied['id'])
+
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(1, len(token_roles))
+
+ def test_unrelated_implied_roles_do_not_change_v3_token(self):
+ self.config_fixture.config(group='token', infer_roles=True)
+ token_roles = self._get_scoped_token_roles()
+ prior = token_roles[0]['id']
+ implied = self._create_implied_role(prior)
+
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(2, len(token_roles))
+
+ unrelated = self._create_role()
+ url = '/roles/%s/implies/%s' % (unrelated['id'], implied['id'])
+ self.put(url, expected_status=http_client.CREATED)
+
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(2, len(token_roles))
+
+ self._delete_implied_role(unrelated['id'], implied['id'])
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(2, len(token_roles))
+
+ def test_domain_specific_roles_do_not_show_v3_token(self):
+ self.config_fixture.config(group='token', infer_roles=True)
+ initial_token_roles = self._get_scoped_token_roles()
+
+ new_role = self._create_role(domain_id=self.domain_id)
+ self.assignment_api.create_grant(new_role['id'],
+ user_id=self.user['id'],
+ project_id=self.project['id'])
+ implied = self._create_implied_role(new_role['id'])
+
+ token_roles = self._get_scoped_token_roles()
+ self.assertEqual(len(initial_token_roles) + 1, len(token_roles))
+
+ # The implied role from the domain specific role should be in the
+ # token, but not the domain specific role itself.
+ token_role_ids = [role['id'] for role in token_roles]
+ self.assertIn(implied['id'], token_role_ids)
+ self.assertNotIn(new_role['id'], token_role_ids)
+
+ def test_remove_all_roles_from_scope_result_in_404(self):
+ # create a new user
+ new_user = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+
+ # give the new user a role on a project
+ path = '/projects/%s/users/%s/roles/%s' % (
+ self.project['id'], new_user['id'], self.role['id'])
+ self.put(path=path)
+
+ # authenticate as the new user and get a project-scoped token
+ auth_data = self.build_authentication_request(
+ user_id=new_user['id'],
+ password=new_user['password'],
+ project_id=self.project['id'])
+ subject_token_id = self.v3_create_token(auth_data).headers.get(
+ 'X-Subject-Token')
+
+ # make sure the project-scoped token is valid
+ headers = {'X-Subject-Token': subject_token_id}
+ r = self.get('/auth/tokens', headers=headers)
+ self.assertValidProjectScopedTokenResponse(r)
+
+ # remove the roles from the user for the given scope
+ path = '/projects/%s/users/%s/roles/%s' % (
+ self.project['id'], new_user['id'], self.role['id'])
+ self.delete(path=path)
+
+ # token validation should now result in 404
+ self.get('/auth/tokens', headers=headers,
+ expected_status=http_client.NOT_FOUND)
+
+
+class TokenDataTests(object):
+ """Test the data in specific token types."""
+
+ def test_unscoped_token_format(self):
+ # ensure the unscoped token response contains the appropriate data
+ r = self.get('/auth/tokens', headers=self.headers)
+ self.assertValidUnscopedTokenResponse(r)
+
+ def test_domain_scoped_token_format(self):
+ # ensure the domain scoped token response contains the appropriate data
+ self.assignment_api.create_grant(
+ self.role['id'],
+ user_id=self.default_domain_user['id'],
+ domain_id=self.domain['id'])
+
+ domain_scoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ domain_id=self.domain['id'])
+ )
+ self.headers['X-Subject-Token'] = domain_scoped_token
+ r = self.get('/auth/tokens', headers=self.headers)
+ self.assertValidDomainScopedTokenResponse(r)
+
+ def test_project_scoped_token_format(self):
+ # ensure project scoped token responses contains the appropriate data
+ project_scoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project['id'])
+ )
+ self.headers['X-Subject-Token'] = project_scoped_token
+ r = self.get('/auth/tokens', headers=self.headers)
+ self.assertValidProjectScopedTokenResponse(r)
+
+ def test_extra_data_in_unscoped_token_fails_validation(self):
+ # ensure unscoped token response contains the appropriate data
+ r = self.get('/auth/tokens', headers=self.headers)
+
+ # populate the response result with some extra data
+ r.result['token'][u'extra'] = six.text_type(uuid.uuid4().hex)
+ self.assertRaises(exception.SchemaValidationError,
+ self.assertValidUnscopedTokenResponse,
+ r)
+
+ def test_extra_data_in_domain_scoped_token_fails_validation(self):
+ # ensure domain scoped token response contains the appropriate data
+ self.assignment_api.create_grant(
+ self.role['id'],
+ user_id=self.default_domain_user['id'],
+ domain_id=self.domain['id'])
+
+ domain_scoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ domain_id=self.domain['id'])
+ )
+ self.headers['X-Subject-Token'] = domain_scoped_token
+ r = self.get('/auth/tokens', headers=self.headers)
+
+ # populate the response result with some extra data
+ r.result['token'][u'extra'] = six.text_type(uuid.uuid4().hex)
+ self.assertRaises(exception.SchemaValidationError,
+ self.assertValidDomainScopedTokenResponse,
+ r)
+
+ def test_extra_data_in_project_scoped_token_fails_validation(self):
+ # ensure project scoped token responses contains the appropriate data
+ project_scoped_token = self.get_requested_token(
+ self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project['id'])
+ )
+ self.headers['X-Subject-Token'] = project_scoped_token
+ resp = self.get('/auth/tokens', headers=self.headers)
+
+ # populate the response result with some extra data
+ resp.result['token'][u'extra'] = six.text_type(uuid.uuid4().hex)
+ self.assertRaises(exception.SchemaValidationError,
+ self.assertValidProjectScopedTokenResponse,
+ resp)
+
class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
def config_overrides(self):
@@ -431,7 +1164,7 @@ class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
allow_rescope_scoped_token=False)
def test_rescoping_v3_to_v3_disabled(self):
- self.v3_authenticate_token(
+ self.v3_create_token(
self.build_authentication_request(
token=self.get_scoped_token(),
project_id=self.project_id),
@@ -465,7 +1198,7 @@ class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
def test_rescoping_v2_to_v3_disabled(self):
token = self._v2_token()
- self.v3_authenticate_token(
+ self.v3_create_token(
self.build_authentication_request(
token=token['access']['token']['id'],
project_id=self.project_id),
@@ -481,7 +1214,7 @@ class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
def test_rescoped_domain_token_disabled(self):
- self.domainA = self.new_domain_ref()
+ self.domainA = unit.new_domain_ref()
self.resource_api.create_domain(self.domainA['id'], self.domainA)
self.assignment_api.create_grant(self.role['id'],
user_id=self.user['id'],
@@ -495,14 +1228,14 @@ class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase):
self.build_authentication_request(
token=unscoped_token,
domain_id=self.domainA['id']))
- self.v3_authenticate_token(
+ self.v3_create_token(
self.build_authentication_request(
token=domain_scoped_token,
project_id=self.project_id),
expected_status=http_client.FORBIDDEN)
-class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
+class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests, TokenDataTests):
def config_overrides(self):
super(TestPKITokenAPIs, self).config_overrides()
self.config_fixture.config(group='token', provider='pki')
@@ -518,7 +1251,7 @@ class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'])
- resp = self.v3_authenticate_token(auth_data)
+ resp = self.v3_create_token(auth_data)
token_data = resp.result
token_id = resp.headers.get('X-Subject-Token')
self.assertIn('expires_at', token_data['token'])
@@ -542,7 +1275,7 @@ class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
user_id=self.default_domain_user['id'],
password=self.default_domain_user['password'],
project_id=self.default_domain_project['id'])
- resp = self.v3_authenticate_token(auth_data)
+ resp = self.v3_create_token(auth_data)
token_data = resp.result
token = resp.headers.get('X-Subject-Token')
@@ -550,7 +1283,7 @@ class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
token = cms.cms_hash_token(token)
path = '/v2.0/tokens/%s' % (token)
resp = self.admin_request(path=path,
- token=CONF.admin_token,
+ token=self.get_admin_token(),
method='GET')
v2_token = resp.result
self.assertEqual(v2_token['access']['user']['id'],
@@ -559,8 +1292,8 @@ class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
# just need to make sure the non fraction part agrees
self.assertIn(v2_token['access']['token']['expires'][:-1],
token_data['token']['expires_at'])
- self.assertEqual(v2_token['access']['user']['roles'][0]['id'],
- token_data['token']['roles'][0]['id'])
+ self.assertEqual(v2_token['access']['user']['roles'][0]['name'],
+ token_data['token']['roles'][0]['name'])
class TestPKIZTokenAPIs(TestPKITokenAPIs):
@@ -572,7 +1305,8 @@ class TestPKIZTokenAPIs(TestPKITokenAPIs):
return cms.pkiz_verify(*args, **kwargs)
-class TestUUIDTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
+class TestUUIDTokenAPIs(test_v3.RestfulTestCase, TokenAPITests,
+ TokenDataTests):
def config_overrides(self):
super(TestUUIDTokenAPIs, self).config_overrides()
self.config_fixture.config(group='token', provider='uuid')
@@ -585,14 +1319,15 @@ class TestUUIDTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'])
- resp = self.v3_authenticate_token(auth_data)
+ resp = self.v3_create_token(auth_data)
token_data = resp.result
token_id = resp.headers.get('X-Subject-Token')
self.assertIn('expires_at', token_data['token'])
self.assertFalse(cms.is_asn1_token(token_id))
-class TestFernetTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
+class TestFernetTokenAPIs(test_v3.RestfulTestCase, TokenAPITests,
+ TokenDataTests):
def config_overrides(self):
super(TestFernetTokenAPIs, self).config_overrides()
self.config_fixture.config(group='token', provider='fernet')
@@ -602,6 +1337,34 @@ class TestFernetTokenAPIs(test_v3.RestfulTestCase, TokenAPITests):
super(TestFernetTokenAPIs, self).setUp()
self.doSetUp()
+ def _make_auth_request(self, auth_data):
+ token = super(TestFernetTokenAPIs, self)._make_auth_request(auth_data)
+ self.assertLess(len(token), 255)
+ return token
+
+ def test_validate_tampered_unscoped_token_fails(self):
+ unscoped_token = self._get_unscoped_token()
+ tampered_token = (unscoped_token[:50] + uuid.uuid4().hex +
+ unscoped_token[50 + 32:])
+ self._validate_token(tampered_token,
+ expected_status=http_client.NOT_FOUND)
+
+ def test_validate_tampered_project_scoped_token_fails(self):
+ project_scoped_token = self._get_project_scoped_token()
+ tampered_token = (project_scoped_token[:50] + uuid.uuid4().hex +
+ project_scoped_token[50 + 32:])
+ self._validate_token(tampered_token,
+ expected_status=http_client.NOT_FOUND)
+
+ def test_validate_tampered_trust_scoped_token_fails(self):
+ trustee_user, trust = self._create_trust()
+ trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
+ # Get a trust scoped token
+ tampered_token = (trust_scoped_token[:50] + uuid.uuid4().hex +
+ trust_scoped_token[50 + 32:])
+ self._validate_token(tampered_token,
+ expected_status=http_client.NOT_FOUND)
+
class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase):
"""Test token revoke using v3 Identity API by token owner and admin."""
@@ -616,29 +1379,22 @@ class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase):
"""
super(TestTokenRevokeSelfAndAdmin, self).load_sample_data()
# DomainA setup
- self.domainA = self.new_domain_ref()
+ self.domainA = unit.new_domain_ref()
self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.userAdminA = self.new_user_ref(domain_id=self.domainA['id'])
- password = self.userAdminA['password']
- self.userAdminA = self.identity_api.create_user(self.userAdminA)
- self.userAdminA['password'] = password
+ self.userAdminA = unit.create_user(self.identity_api,
+ domain_id=self.domainA['id'])
- self.userNormalA = self.new_user_ref(
- domain_id=self.domainA['id'])
- password = self.userNormalA['password']
- self.userNormalA = self.identity_api.create_user(self.userNormalA)
- self.userNormalA['password'] = password
+ self.userNormalA = unit.create_user(self.identity_api,
+ domain_id=self.domainA['id'])
self.assignment_api.create_grant(self.role['id'],
user_id=self.userAdminA['id'],
domain_id=self.domainA['id'])
- def config_overrides(self):
- super(TestTokenRevokeSelfAndAdmin, self).config_overrides()
- self.config_fixture.config(
- group='oslo_policy',
- policy_file=unit.dirs.etc('policy.v3cloudsample.json'))
+ def _policy_fixture(self):
+ return ksfixtures.Policy(unit.dirs.etc('policy.v3cloudsample.json'),
+ self.config_fixture)
def test_user_revokes_own_token(self):
user_token = self.get_requested_token(
@@ -655,11 +1411,13 @@ class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase):
password=self.userAdminA['password'],
domain_name=self.domainA['name']))
- self.head('/auth/tokens', headers=headers, expected_status=200,
+ self.head('/auth/tokens', headers=headers,
+ expected_status=http_client.OK,
token=adminA_token)
- self.head('/auth/tokens', headers=headers, expected_status=200,
+ self.head('/auth/tokens', headers=headers,
+ expected_status=http_client.OK,
token=user_token)
- self.delete('/auth/tokens', headers=headers, expected_status=204,
+ self.delete('/auth/tokens', headers=headers,
token=user_token)
# invalid X-Auth-Token and invalid X-Subject-Token
self.head('/auth/tokens', headers=headers,
@@ -693,11 +1451,13 @@ class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase):
password=self.userAdminA['password'],
domain_name=self.domainA['name']))
- self.head('/auth/tokens', headers=headers, expected_status=200,
+ self.head('/auth/tokens', headers=headers,
+ expected_status=http_client.OK,
token=adminA_token)
- self.head('/auth/tokens', headers=headers, expected_status=200,
+ self.head('/auth/tokens', headers=headers,
+ expected_status=http_client.OK,
token=user_token)
- self.delete('/auth/tokens', headers=headers, expected_status=204,
+ self.delete('/auth/tokens', headers=headers,
token=adminA_token)
# invalid X-Auth-Token and invalid X-Subject-Token
self.head('/auth/tokens', headers=headers,
@@ -714,14 +1474,12 @@ class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase):
def test_adminB_fails_revoking_userA_token(self):
# DomainB setup
- self.domainB = self.new_domain_ref()
+ self.domainB = unit.new_domain_ref()
self.resource_api.create_domain(self.domainB['id'], self.domainB)
- self.userAdminB = self.new_user_ref(domain_id=self.domainB['id'])
- password = self.userAdminB['password']
- self.userAdminB = self.identity_api.create_user(self.userAdminB)
- self.userAdminB['password'] = password
+ userAdminB = unit.create_user(self.identity_api,
+ domain_id=self.domainB['id'])
self.assignment_api.create_grant(self.role['id'],
- user_id=self.userAdminB['id'],
+ user_id=userAdminB['id'],
domain_id=self.domainB['id'])
user_token = self.get_requested_token(
@@ -733,8 +1491,8 @@ class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase):
adminB_token = self.get_requested_token(
self.build_authentication_request(
- user_id=self.userAdminB['id'],
- password=self.userAdminB['password'],
+ user_id=userAdminB['id'],
+ password=userAdminB['password'],
domain_name=self.domainB['name']))
self.head('/auth/tokens', headers=headers,
@@ -750,7 +1508,6 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
def config_overrides(self):
super(TestTokenRevokeById, self).config_overrides()
- self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='token',
provider='pki',
@@ -782,44 +1539,32 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
super(TestTokenRevokeById, self).setUp()
# Start by creating a couple of domains and projects
- self.domainA = self.new_domain_ref()
+ self.domainA = unit.new_domain_ref()
self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.domainB = self.new_domain_ref()
+ self.domainB = unit.new_domain_ref()
self.resource_api.create_domain(self.domainB['id'], self.domainB)
- self.projectA = self.new_project_ref(domain_id=self.domainA['id'])
+ self.projectA = unit.new_project_ref(domain_id=self.domainA['id'])
self.resource_api.create_project(self.projectA['id'], self.projectA)
- self.projectB = self.new_project_ref(domain_id=self.domainA['id'])
+ self.projectB = unit.new_project_ref(domain_id=self.domainA['id'])
self.resource_api.create_project(self.projectB['id'], self.projectB)
# Now create some users
- self.user1 = self.new_user_ref(
- domain_id=self.domainA['id'])
- password = self.user1['password']
- self.user1 = self.identity_api.create_user(self.user1)
- self.user1['password'] = password
-
- self.user2 = self.new_user_ref(
- domain_id=self.domainB['id'])
- password = self.user2['password']
- self.user2 = self.identity_api.create_user(self.user2)
- self.user2['password'] = password
-
- self.user3 = self.new_user_ref(
- domain_id=self.domainB['id'])
- password = self.user3['password']
- self.user3 = self.identity_api.create_user(self.user3)
- self.user3['password'] = password
-
- self.group1 = self.new_group_ref(
- domain_id=self.domainA['id'])
+ self.user1 = unit.create_user(self.identity_api,
+ domain_id=self.domainA['id'])
+
+ self.user2 = unit.create_user(self.identity_api,
+ domain_id=self.domainB['id'])
+
+ self.user3 = unit.create_user(self.identity_api,
+ domain_id=self.domainB['id'])
+
+ self.group1 = unit.new_group_ref(domain_id=self.domainA['id'])
self.group1 = self.identity_api.create_group(self.group1)
- self.group2 = self.new_group_ref(
- domain_id=self.domainA['id'])
+ self.group2 = unit.new_group_ref(domain_id=self.domainA['id'])
self.group2 = self.identity_api.create_group(self.group2)
- self.group3 = self.new_group_ref(
- domain_id=self.domainB['id'])
+ self.group3 = unit.new_group_ref(domain_id=self.domainB['id'])
self.group3 = self.identity_api.create_group(self.group3)
self.identity_api.add_user_to_group(self.user1['id'],
@@ -829,9 +1574,9 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
self.identity_api.add_user_to_group(self.user3['id'],
self.group2['id'])
- self.role1 = self.new_role_ref()
+ self.role1 = unit.new_role_ref()
self.role_api.create_role(self.role1['id'], self.role1)
- self.role2 = self.new_role_ref()
+ self.role2 = unit.new_role_ref()
self.role_api.create_role(self.role2['id'], self.role2)
self.assignment_api.create_grant(self.role2['id'],
@@ -864,13 +1609,13 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# confirm both tokens are valid
self.head('/auth/tokens',
headers={'X-Subject-Token': unscoped_token},
- expected_status=200)
+ expected_status=http_client.OK)
self.head('/auth/tokens',
headers={'X-Subject-Token': scoped_token},
- expected_status=200)
+ expected_status=http_client.OK)
# create a new role
- role = self.new_role_ref()
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
# assign a new role
@@ -883,10 +1628,10 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# both tokens should remain valid
self.head('/auth/tokens',
headers={'X-Subject-Token': unscoped_token},
- expected_status=200)
+ expected_status=http_client.OK)
self.head('/auth/tokens',
headers={'X-Subject-Token': scoped_token},
- expected_status=200)
+ expected_status=http_client.OK)
def test_deleting_user_grant_revokes_token(self):
"""Test deleting a user grant revokes token.
@@ -906,7 +1651,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# Confirm token is valid
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
- expected_status=200)
+ expected_status=http_client.OK)
# Delete the grant, which should invalidate the token
grant_url = (
'/projects/%(project_id)s/users/%(user_id)s/'
@@ -920,22 +1665,14 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
expected_status=http_client.NOT_FOUND)
def role_data_fixtures(self):
- self.projectC = self.new_project_ref(domain_id=self.domainA['id'])
+ self.projectC = unit.new_project_ref(domain_id=self.domainA['id'])
self.resource_api.create_project(self.projectC['id'], self.projectC)
- self.user4 = self.new_user_ref(domain_id=self.domainB['id'])
- password = self.user4['password']
- self.user4 = self.identity_api.create_user(self.user4)
- self.user4['password'] = password
- self.user5 = self.new_user_ref(
- domain_id=self.domainA['id'])
- password = self.user5['password']
- self.user5 = self.identity_api.create_user(self.user5)
- self.user5['password'] = password
- self.user6 = self.new_user_ref(
- domain_id=self.domainA['id'])
- password = self.user6['password']
- self.user6 = self.identity_api.create_user(self.user6)
- self.user6['password'] = password
+ self.user4 = unit.create_user(self.identity_api,
+ domain_id=self.domainB['id'])
+ self.user5 = unit.create_user(self.identity_api,
+ domain_id=self.domainA['id'])
+ self.user6 = unit.create_user(self.identity_api,
+ domain_id=self.domainA['id'])
self.identity_api.add_user_to_group(self.user5['id'],
self.group1['id'])
self.assignment_api.create_grant(self.role1['id'],
@@ -954,29 +1691,29 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
def test_deleting_role_revokes_token(self):
"""Test deleting a role revokes token.
- Add some additional test data, namely:
- - A third project (project C)
- - Three additional users - user4 owned by domainB and user5 and 6
- owned by domainA (different domain ownership should not affect
- the test results, just provided to broaden test coverage)
- - User5 is a member of group1
- - Group1 gets an additional assignment - role1 on projectB as
- well as its existing role1 on projectA
- - User4 has role2 on Project C
- - User6 has role1 on projectA and domainA
- - This allows us to create 5 tokens by virtue of different types
- of role assignment:
- - user1, scoped to ProjectA by virtue of user role1 assignment
- - user5, scoped to ProjectB by virtue of group role1 assignment
- - user4, scoped to ProjectC by virtue of user role2 assignment
- - user6, scoped to ProjectA by virtue of user role1 assignment
- - user6, scoped to DomainA by virtue of user role1 assignment
- - role1 is then deleted
- - Check the tokens on Project A and B, and DomainA are revoked,
- but not the one for Project C
+ Add some additional test data, namely:
+
+ - A third project (project C)
+ - Three additional users - user4 owned by domainB and user5 and 6 owned
+ by domainA (different domain ownership should not affect the test
+ results, just provided to broaden test coverage)
+ - User5 is a member of group1
+ - Group1 gets an additional assignment - role1 on projectB as well as
+ its existing role1 on projectA
+ - User4 has role2 on Project C
+ - User6 has role1 on projectA and domainA
+ - This allows us to create 5 tokens by virtue of different types of
+ role assignment:
+ - user1, scoped to ProjectA by virtue of user role1 assignment
+ - user5, scoped to ProjectB by virtue of group role1 assignment
+ - user4, scoped to ProjectC by virtue of user role2 assignment
+ - user6, scoped to ProjectA by virtue of user role1 assignment
+ - user6, scoped to DomainA by virtue of user role1 assignment
+ - role1 is then deleted
+ - Check the tokens on Project A and B, and DomainA are revoked, but not
+ the one for Project C
"""
-
self.role_data_fixtures()
# Now we are ready to start issuing requests
@@ -1008,19 +1745,19 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# Confirm tokens are valid
self.head('/auth/tokens',
headers={'X-Subject-Token': tokenA},
- expected_status=200)
+ expected_status=http_client.OK)
self.head('/auth/tokens',
headers={'X-Subject-Token': tokenB},
- expected_status=200)
+ expected_status=http_client.OK)
self.head('/auth/tokens',
headers={'X-Subject-Token': tokenC},
- expected_status=200)
+ expected_status=http_client.OK)
self.head('/auth/tokens',
headers={'X-Subject-Token': tokenD},
- expected_status=200)
+ expected_status=http_client.OK)
self.head('/auth/tokens',
headers={'X-Subject-Token': tokenE},
- expected_status=200)
+ expected_status=http_client.OK)
# Delete the role, which should invalidate the tokens
role_url = '/roles/%s' % self.role1['id']
@@ -1043,7 +1780,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# ...but the one using role2 is still valid
self.head('/auth/tokens',
headers={'X-Subject-Token': tokenC},
- expected_status=200)
+ expected_status=http_client.OK)
def test_domain_user_role_assignment_maintains_token(self):
"""Test user-domain role assignment maintains existing token.
@@ -1063,7 +1800,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# Confirm token is valid
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
- expected_status=200)
+ expected_status=http_client.OK)
# Assign a role, which should not affect the token
grant_url = (
'/domains/%(domain_id)s/users/%(user_id)s/'
@@ -1074,7 +1811,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
self.put(grant_url)
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
- expected_status=200)
+ expected_status=http_client.OK)
def test_disabling_project_revokes_token(self):
token = self.get_requested_token(
@@ -1086,7 +1823,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# confirm token is valid
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
- expected_status=200)
+ expected_status=http_client.OK)
# disable the project, which should invalidate the token
self.patch(
@@ -1097,7 +1834,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
expected_status=http_client.NOT_FOUND)
- self.v3_authenticate_token(
+ self.v3_create_token(
self.build_authentication_request(
user_id=self.user3['id'],
password=self.user3['password'],
@@ -1114,7 +1851,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# confirm token is valid
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
- expected_status=200)
+ expected_status=http_client.OK)
# delete the project, which should invalidate the token
self.delete(
@@ -1124,7 +1861,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
expected_status=http_client.NOT_FOUND)
- self.v3_authenticate_token(
+ self.v3_create_token(
self.build_authentication_request(
user_id=self.user3['id'],
password=self.user3['password'],
@@ -1163,13 +1900,13 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# Confirm tokens are valid
self.head('/auth/tokens',
headers={'X-Subject-Token': token1},
- expected_status=200)
+ expected_status=http_client.OK)
self.head('/auth/tokens',
headers={'X-Subject-Token': token2},
- expected_status=200)
+ expected_status=http_client.OK)
self.head('/auth/tokens',
headers={'X-Subject-Token': token3},
- expected_status=200)
+ expected_status=http_client.OK)
# Delete the group grant, which should invalidate the
# tokens for user1 and user2
grant_url = (
@@ -1209,7 +1946,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# Confirm token is valid
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
- expected_status=200)
+ expected_status=http_client.OK)
# Delete the grant, which should invalidate the token
grant_url = (
'/domains/%(domain_id)s/groups/%(group_id)s/'
@@ -1220,7 +1957,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
self.put(grant_url)
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
- expected_status=200)
+ expected_status=http_client.OK)
def test_group_membership_changes_revokes_token(self):
"""Test add/removal to/from group revokes token.
@@ -1250,10 +1987,10 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# Confirm tokens are valid
self.head('/auth/tokens',
headers={'X-Subject-Token': token1},
- expected_status=200)
+ expected_status=http_client.OK)
self.head('/auth/tokens',
headers={'X-Subject-Token': token2},
- expected_status=200)
+ expected_status=http_client.OK)
# Remove user1 from group1, which should invalidate
# the token
self.delete('/groups/%(group_id)s/users/%(user_id)s' % {
@@ -1265,18 +2002,17 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# But user2's token should still be valid
self.head('/auth/tokens',
headers={'X-Subject-Token': token2},
- expected_status=200)
+ expected_status=http_client.OK)
# Adding user2 to a group should not invalidate token
self.put('/groups/%(group_id)s/users/%(user_id)s' % {
'group_id': self.group2['id'],
'user_id': self.user2['id']})
self.head('/auth/tokens',
headers={'X-Subject-Token': token2},
- expected_status=200)
+ expected_status=http_client.OK)
def test_removing_role_assignment_does_not_affect_other_users(self):
"""Revoking a role from one user should not affect other users."""
-
# This group grant is not needed for the test
self.delete(
'/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' %
@@ -1306,7 +2042,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
self.head('/auth/tokens',
headers={'X-Subject-Token': user1_token},
expected_status=http_client.NOT_FOUND)
- self.v3_authenticate_token(
+ self.v3_create_token(
self.build_authentication_request(
user_id=self.user1['id'],
password=self.user1['password'],
@@ -1316,8 +2052,8 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# authorization for the second user should still succeed
self.head('/auth/tokens',
headers={'X-Subject-Token': user3_token},
- expected_status=200)
- self.v3_authenticate_token(
+ expected_status=http_client.OK)
+ self.v3_create_token(
self.build_authentication_request(
user_id=self.user3['id'],
password=self.user3['password'],
@@ -1338,7 +2074,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
self.delete(
'/projects/%(project_id)s' % {'project_id': self.projectA['id']})
- # Make sure that we get a NotFound(404) when heading that role.
+ # Make sure that we get a 404 Not Found when heading that role.
self.head(role_path, expected_status=http_client.NOT_FOUND)
def get_v2_token(self, token=None, project_id=None):
@@ -1366,8 +2102,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
token = self.get_v2_token()
self.delete('/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=204)
+ headers={'X-Subject-Token': token})
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
@@ -1397,8 +2132,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# revoke the project-scoped token.
self.delete('/auth/tokens',
- headers={'X-Subject-Token': project_scoped_token},
- expected_status=204)
+ headers={'X-Subject-Token': project_scoped_token})
# The project-scoped token is invalidated.
self.head('/auth/tokens',
@@ -1408,17 +2142,16 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# The unscoped token should still be valid.
self.head('/auth/tokens',
headers={'X-Subject-Token': unscoped_token},
- expected_status=200)
+ expected_status=http_client.OK)
# The domain-scoped token should still be valid.
self.head('/auth/tokens',
headers={'X-Subject-Token': domain_scoped_token},
- expected_status=200)
+ expected_status=http_client.OK)
# revoke the domain-scoped token.
self.delete('/auth/tokens',
- headers={'X-Subject-Token': domain_scoped_token},
- expected_status=204)
+ headers={'X-Subject-Token': domain_scoped_token})
# The domain-scoped token is invalid.
self.head('/auth/tokens',
@@ -1428,16 +2161,13 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# The unscoped token should still be valid.
self.head('/auth/tokens',
headers={'X-Subject-Token': unscoped_token},
- expected_status=200)
+ expected_status=http_client.OK)
def test_revoke_token_from_token_v2(self):
# Test that a scoped token can be requested from an unscoped token,
# the scoped token can be revoked, and the unscoped token remains
# valid.
- # FIXME(blk-u): This isn't working correctly. The scoped token should
- # be revoked. See bug 1347318.
-
unscoped_token = self.get_v2_token()
# Get a project-scoped token from the unscoped token
@@ -1446,8 +2176,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# revoke the project-scoped token.
self.delete('/auth/tokens',
- headers={'X-Subject-Token': project_scoped_token},
- expected_status=204)
+ headers={'X-Subject-Token': project_scoped_token})
# The project-scoped token is invalidated.
self.head('/auth/tokens',
@@ -1457,7 +2186,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
# The unscoped token should still be valid.
self.head('/auth/tokens',
headers={'X-Subject-Token': unscoped_token},
- expected_status=200)
+ expected_status=http_client.OK)
class TestTokenRevokeByAssignment(TestTokenRevokeById):
@@ -1465,9 +2194,6 @@ class TestTokenRevokeByAssignment(TestTokenRevokeById):
def config_overrides(self):
super(TestTokenRevokeById, self).config_overrides()
self.config_fixture.config(
- group='revoke',
- driver='kvs')
- self.config_fixture.config(
group='token',
provider='uuid',
revoke_by_id=True)
@@ -1501,7 +2227,7 @@ class TestTokenRevokeByAssignment(TestTokenRevokeById):
# authorization for the projectA should still succeed
self.head('/auth/tokens',
headers={'X-Subject-Token': other_project_token},
- expected_status=200)
+ expected_status=http_client.OK)
# while token for the projectB should not
self.head('/auth/tokens',
headers={'X-Subject-Token': project_token},
@@ -1512,14 +2238,21 @@ class TestTokenRevokeByAssignment(TestTokenRevokeById):
self.assertIn(project_token, revoked_tokens)
-class TestTokenRevokeApi(TestTokenRevokeById):
- EXTENSION_NAME = 'revoke'
- EXTENSION_TO_ADD = 'revoke_extension'
+class RevokeContribTests(test_v3.RestfulTestCase):
+ @mock.patch.object(versionutils, 'report_deprecated_feature')
+ def test_exception_happens(self, mock_deprecator):
+ routers.RevokeExtension(mock.ANY)
+ mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("Remove revoke_extension from", args[1])
+
+
+class TestTokenRevokeApi(TestTokenRevokeById):
"""Test token revocation on the v3 Identity API."""
+
def config_overrides(self):
super(TestTokenRevokeApi, self).config_overrides()
- self.config_fixture.config(group='revoke', driver='kvs')
self.config_fixture.config(
group='token',
provider='pki',
@@ -1536,15 +2269,19 @@ class TestTokenRevokeApi(TestTokenRevokeById):
expected_response = {'events': [{'project_id': project_id}]}
self.assertEqual(expected_response, events_response)
- def assertDomainInList(self, events_response, domain_id):
+ def assertDomainAndProjectInList(self, events_response, domain_id):
events = events_response['events']
- self.assertEqual(1, len(events))
- self.assertEqual(domain_id, events[0]['domain_id'])
+ self.assertEqual(2, len(events))
+ self.assertEqual(domain_id, events[0]['project_id'])
+ self.assertEqual(domain_id, events[1]['domain_id'])
self.assertIsNotNone(events[0]['issued_before'])
+ self.assertIsNotNone(events[1]['issued_before'])
self.assertIsNotNone(events_response['links'])
del (events_response['events'][0]['issued_before'])
+ del (events_response['events'][1]['issued_before'])
del (events_response['links'])
- expected_response = {'events': [{'domain_id': domain_id}]}
+ expected_response = {'events': [{'project_id': domain_id},
+ {'domain_id': domain_id}]}
self.assertEqual(expected_response, events_response)
def assertValidRevokedTokenResponse(self, events_response, **kwargs):
@@ -1563,62 +2300,55 @@ class TestTokenRevokeApi(TestTokenRevokeById):
def test_revoke_token(self):
scoped_token = self.get_scoped_token()
headers = {'X-Subject-Token': scoped_token}
- response = self.get('/auth/tokens', headers=headers,
- expected_status=200).json_body['token']
+ response = self.get('/auth/tokens', headers=headers).json_body['token']
- self.delete('/auth/tokens', headers=headers, expected_status=204)
+ self.delete('/auth/tokens', headers=headers)
self.head('/auth/tokens', headers=headers,
expected_status=http_client.NOT_FOUND)
- events_response = self.get('/OS-REVOKE/events',
- expected_status=200).json_body
+ events_response = self.get('/OS-REVOKE/events').json_body
self.assertValidRevokedTokenResponse(events_response,
audit_id=response['audit_ids'][0])
def test_revoke_v2_token(self):
token = self.get_v2_token()
headers = {'X-Subject-Token': token}
- response = self.get('/auth/tokens', headers=headers,
- expected_status=200).json_body['token']
- self.delete('/auth/tokens', headers=headers, expected_status=204)
+ response = self.get('/auth/tokens',
+ headers=headers).json_body['token']
+ self.delete('/auth/tokens', headers=headers)
self.head('/auth/tokens', headers=headers,
expected_status=http_client.NOT_FOUND)
- events_response = self.get('/OS-REVOKE/events',
- expected_status=200).json_body
+ events_response = self.get('/OS-REVOKE/events').json_body
self.assertValidRevokedTokenResponse(
events_response,
audit_id=response['audit_ids'][0])
- def test_revoke_by_id_false_410(self):
+ def test_revoke_by_id_false_returns_gone(self):
self.get('/auth/tokens/OS-PKI/revoked',
expected_status=http_client.GONE)
def test_list_delete_project_shows_in_event_list(self):
self.role_data_fixtures()
- events = self.get('/OS-REVOKE/events',
- expected_status=200).json_body['events']
+ events = self.get('/OS-REVOKE/events').json_body['events']
self.assertEqual([], events)
self.delete(
'/projects/%(project_id)s' % {'project_id': self.projectA['id']})
- events_response = self.get('/OS-REVOKE/events',
- expected_status=200).json_body
+ events_response = self.get('/OS-REVOKE/events').json_body
self.assertValidDeletedProjectResponse(events_response,
self.projectA['id'])
def test_disable_domain_shows_in_event_list(self):
- events = self.get('/OS-REVOKE/events',
- expected_status=200).json_body['events']
+ events = self.get('/OS-REVOKE/events').json_body['events']
self.assertEqual([], events)
disable_body = {'domain': {'enabled': False}}
self.patch(
'/domains/%(project_id)s' % {'project_id': self.domainA['id']},
body=disable_body)
- events = self.get('/OS-REVOKE/events',
- expected_status=200).json_body
+ events = self.get('/OS-REVOKE/events').json_body
- self.assertDomainInList(events, self.domainA['id'])
+ self.assertDomainAndProjectInList(events, self.domainA['id'])
def assertEventDataInList(self, events, **kwargs):
found = False
@@ -1646,30 +2376,31 @@ class TestTokenRevokeApi(TestTokenRevokeById):
def test_list_delete_token_shows_in_event_list(self):
self.role_data_fixtures()
- events = self.get('/OS-REVOKE/events',
- expected_status=200).json_body['events']
+ events = self.get('/OS-REVOKE/events').json_body['events']
self.assertEqual([], events)
scoped_token = self.get_scoped_token()
headers = {'X-Subject-Token': scoped_token}
auth_req = self.build_authentication_request(token=scoped_token)
- response = self.v3_authenticate_token(auth_req)
+ response = self.v3_create_token(auth_req)
token2 = response.json_body['token']
headers2 = {'X-Subject-Token': response.headers['X-Subject-Token']}
- response = self.v3_authenticate_token(auth_req)
+ response = self.v3_create_token(auth_req)
response.json_body['token']
headers3 = {'X-Subject-Token': response.headers['X-Subject-Token']}
- self.head('/auth/tokens', headers=headers, expected_status=200)
- self.head('/auth/tokens', headers=headers2, expected_status=200)
- self.head('/auth/tokens', headers=headers3, expected_status=200)
+ self.head('/auth/tokens', headers=headers,
+ expected_status=http_client.OK)
+ self.head('/auth/tokens', headers=headers2,
+ expected_status=http_client.OK)
+ self.head('/auth/tokens', headers=headers3,
+ expected_status=http_client.OK)
- self.delete('/auth/tokens', headers=headers, expected_status=204)
+ self.delete('/auth/tokens', headers=headers)
# NOTE(ayoung): not deleting token3, as it should be deleted
# by previous
- events_response = self.get('/OS-REVOKE/events',
- expected_status=200).json_body
+ events_response = self.get('/OS-REVOKE/events').json_body
events = events_response['events']
self.assertEqual(1, len(events))
self.assertEventDataInList(
@@ -1677,32 +2408,32 @@ class TestTokenRevokeApi(TestTokenRevokeById):
audit_id=token2['audit_ids'][1])
self.head('/auth/tokens', headers=headers,
expected_status=http_client.NOT_FOUND)
- self.head('/auth/tokens', headers=headers2, expected_status=200)
- self.head('/auth/tokens', headers=headers3, expected_status=200)
+ self.head('/auth/tokens', headers=headers2,
+ expected_status=http_client.OK)
+ self.head('/auth/tokens', headers=headers3,
+ expected_status=http_client.OK)
def test_list_with_filter(self):
self.role_data_fixtures()
- events = self.get('/OS-REVOKE/events',
- expected_status=200).json_body['events']
+ events = self.get('/OS-REVOKE/events').json_body['events']
self.assertEqual(0, len(events))
scoped_token = self.get_scoped_token()
headers = {'X-Subject-Token': scoped_token}
auth = self.build_authentication_request(token=scoped_token)
headers2 = {'X-Subject-Token': self.get_requested_token(auth)}
- self.delete('/auth/tokens', headers=headers, expected_status=204)
- self.delete('/auth/tokens', headers=headers2, expected_status=204)
+ self.delete('/auth/tokens', headers=headers)
+ self.delete('/auth/tokens', headers=headers2)
- events = self.get('/OS-REVOKE/events',
- expected_status=200).json_body['events']
+ events = self.get('/OS-REVOKE/events').json_body['events']
self.assertEqual(2, len(events))
future = utils.isotime(timeutils.utcnow() +
datetime.timedelta(seconds=1000))
- events = self.get('/OS-REVOKE/events?since=%s' % (future),
- expected_status=200).json_body['events']
+ events = self.get('/OS-REVOKE/events?since=%s' % (future)
+ ).json_body['events']
self.assertEqual(0, len(events))
@@ -1764,7 +2495,7 @@ class TestAuthExternalDomain(test_v3.RestfulTestCase):
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
'REMOTE_DOMAIN': remote_domain,
'AUTH_TYPE': 'Negotiate'})
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = self.assertValidProjectScopedTokenResponse(r)
self.assertEqual(self.user['name'], token['bind']['kerberos'])
@@ -1776,7 +2507,7 @@ class TestAuthExternalDomain(test_v3.RestfulTestCase):
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
'REMOTE_DOMAIN': remote_domain,
'AUTH_TYPE': 'Negotiate'})
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = self.assertValidUnscopedTokenResponse(r)
self.assertEqual(self.user['name'], token['bind']['kerberos'])
@@ -1820,7 +2551,7 @@ class TestAuthExternalDefaultDomain(test_v3.RestfulTestCase):
remote_user = self.default_domain_user['name']
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
'AUTH_TYPE': 'Negotiate'})
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = self.assertValidProjectScopedTokenResponse(r)
self.assertEqual(self.default_domain_user['name'],
token['bind']['kerberos'])
@@ -1831,7 +2562,7 @@ class TestAuthExternalDefaultDomain(test_v3.RestfulTestCase):
remote_user = self.default_domain_user['name']
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
'AUTH_TYPE': 'Negotiate'})
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = self.assertValidUnscopedTokenResponse(r)
self.assertEqual(self.default_domain_user['name'],
token['bind']['kerberos'])
@@ -1852,7 +2583,7 @@ class TestAuth(test_v3.RestfulTestCase):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidUnscopedTokenResponse(r)
def test_unscoped_token_with_user_domain_id(self):
@@ -1860,7 +2591,7 @@ class TestAuth(test_v3.RestfulTestCase):
username=self.user['name'],
user_domain_id=self.domain['id'],
password=self.user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidUnscopedTokenResponse(r)
def test_unscoped_token_with_user_domain_name(self):
@@ -1868,7 +2599,7 @@ class TestAuth(test_v3.RestfulTestCase):
username=self.user['name'],
user_domain_name=self.domain['name'],
password=self.user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidUnscopedTokenResponse(r)
def test_project_id_scoped_token_with_user_id(self):
@@ -1876,11 +2607,11 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
project_id=self.project['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidProjectScopedTokenResponse(r)
def _second_project_as_default(self):
- ref = self.new_project_ref(domain_id=self.domain_id)
+ ref = unit.new_project_ref(domain_id=self.domain_id)
r = self.post('/projects', body={'project': ref})
project = self.assertValidProjectResponse(r, ref)
@@ -1907,7 +2638,7 @@ class TestAuth(test_v3.RestfulTestCase):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidProjectScopedTokenResponse(r)
self.assertEqual(project['id'], r.result['token']['project']['id'])
@@ -1952,7 +2683,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
project_id=self.project['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
catalog = r.result['token']['catalog']
self.assertEqual(1, len(catalog))
@@ -1989,13 +2720,12 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
project_id=self.project['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertEqual([], r.result['token']['catalog'])
def test_auth_catalog_disabled_endpoint(self):
"""On authenticate, get a catalog that excludes disabled endpoints."""
-
# Create a disabled endpoint that's like the enabled one.
disabled_endpoint_ref = copy.copy(self.endpoint)
disabled_endpoint_id = uuid.uuid4().hex
@@ -2011,21 +2741,21 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
project_id=self.project['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self._check_disabled_endpoint_result(r.result['token']['catalog'],
disabled_endpoint_id)
def test_project_id_scoped_token_with_user_id_unauthorized(self):
- project = self.new_project_ref(domain_id=self.domain_id)
+ project = unit.new_project_ref(domain_id=self.domain_id)
self.resource_api.create_project(project['id'], project)
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
project_id=project['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_user_and_group_roles_scoped_token(self):
"""Test correct roles are returned in scoped token.
@@ -2049,30 +2779,19 @@ class TestAuth(test_v3.RestfulTestCase):
tokens
"""
-
- domainA = self.new_domain_ref()
+ domainA = unit.new_domain_ref()
self.resource_api.create_domain(domainA['id'], domainA)
- projectA = self.new_project_ref(domain_id=domainA['id'])
+ projectA = unit.new_project_ref(domain_id=domainA['id'])
self.resource_api.create_project(projectA['id'], projectA)
- user1 = self.new_user_ref(
- domain_id=domainA['id'])
- password = user1['password']
- user1 = self.identity_api.create_user(user1)
- user1['password'] = password
+ user1 = unit.create_user(self.identity_api, domain_id=domainA['id'])
- user2 = self.new_user_ref(
- domain_id=domainA['id'])
- password = user2['password']
- user2 = self.identity_api.create_user(user2)
- user2['password'] = password
+ user2 = unit.create_user(self.identity_api, domain_id=domainA['id'])
- group1 = self.new_group_ref(
- domain_id=domainA['id'])
+ group1 = unit.new_group_ref(domain_id=domainA['id'])
group1 = self.identity_api.create_group(group1)
- group2 = self.new_group_ref(
- domain_id=domainA['id'])
+ group2 = unit.new_group_ref(domain_id=domainA['id'])
group2 = self.identity_api.create_group(group2)
self.identity_api.add_user_to_group(user1['id'],
@@ -2083,7 +2802,7 @@ class TestAuth(test_v3.RestfulTestCase):
# Now create all the roles and assign them
role_list = []
for _ in range(8):
- role = self.new_role_ref()
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
role_list.append(role)
@@ -2119,7 +2838,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=user1['id'],
password=user1['password'],
project_id=projectA['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = self.assertValidScopedTokenResponse(r)
roles_ids = []
for ref in token['roles']:
@@ -2133,7 +2852,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=user1['id'],
password=user1['password'],
domain_id=domainA['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = self.assertValidScopedTokenResponse(r)
roles_ids = []
for ref in token['roles']:
@@ -2151,7 +2870,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=user1['id'],
password=user1['password'],
project_id=projectA['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = self.assertValidScopedTokenResponse(r)
roles_ids = []
for ref in token['roles']:
@@ -2164,30 +2883,23 @@ class TestAuth(test_v3.RestfulTestCase):
def test_auth_token_cross_domain_group_and_project(self):
"""Verify getting a token in cross domain group/project roles."""
# create domain, project and group and grant roles to user
- domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ domain1 = unit.new_domain_ref()
self.resource_api.create_domain(domain1['id'], domain1)
- project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
- 'domain_id': domain1['id']}
+ project1 = unit.new_project_ref(domain_id=domain1['id'])
self.resource_api.create_project(project1['id'], project1)
- user_foo = self.new_user_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID)
- password = user_foo['password']
- user_foo = self.identity_api.create_user(user_foo)
- user_foo['password'] = password
- role_member = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
+ user_foo = unit.create_user(self.identity_api,
+ domain_id=test_v3.DEFAULT_DOMAIN_ID)
+ role_member = unit.new_role_ref()
self.role_api.create_role(role_member['id'], role_member)
- role_admin = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
+ role_admin = unit.new_role_ref()
self.role_api.create_role(role_admin['id'], role_admin)
- role_foo_domain1 = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
+ role_foo_domain1 = unit.new_role_ref()
self.role_api.create_role(role_foo_domain1['id'], role_foo_domain1)
- role_group_domain1 = {'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex}
+ role_group_domain1 = unit.new_role_ref()
self.role_api.create_role(role_group_domain1['id'], role_group_domain1)
self.assignment_api.add_user_to_project(project1['id'],
user_foo['id'])
- new_group = {'domain_id': domain1['id'], 'name': uuid.uuid4().hex}
+ new_group = unit.new_group_ref(domain_id=domain1['id'])
new_group = self.identity_api.create_group(new_group)
self.identity_api.add_user_to_group(user_foo['id'],
new_group['id'])
@@ -2216,7 +2928,7 @@ class TestAuth(test_v3.RestfulTestCase):
project_name=project1['name'],
project_domain_id=domain1['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
scoped_token = self.assertValidScopedTokenResponse(r)
project = scoped_token["project"]
roles_ids = []
@@ -2234,7 +2946,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_domain_id=self.domain['id'],
password=self.user['password'],
project_id=self.project['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidProjectScopedTokenResponse(r)
def test_project_id_scoped_token_with_user_domain_name(self):
@@ -2243,7 +2955,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_domain_name=self.domain['name'],
password=self.user['password'],
project_id=self.project['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidProjectScopedTokenResponse(r)
def test_domain_id_scoped_token_with_user_id(self):
@@ -2255,7 +2967,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
domain_id=self.domain['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidDomainScopedTokenResponse(r)
def test_domain_id_scoped_token_with_user_domain_id(self):
@@ -2268,7 +2980,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_domain_id=self.domain['id'],
password=self.user['password'],
domain_id=self.domain['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidDomainScopedTokenResponse(r)
def test_domain_id_scoped_token_with_user_domain_name(self):
@@ -2281,7 +2993,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_domain_name=self.domain['name'],
password=self.user['password'],
domain_id=self.domain['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidDomainScopedTokenResponse(r)
def test_domain_name_scoped_token_with_user_id(self):
@@ -2293,7 +3005,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
domain_name=self.domain['name'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidDomainScopedTokenResponse(r)
def test_domain_name_scoped_token_with_user_domain_id(self):
@@ -2306,7 +3018,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_domain_id=self.domain['id'],
password=self.user['password'],
domain_name=self.domain['name'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidDomainScopedTokenResponse(r)
def test_domain_name_scoped_token_with_user_domain_name(self):
@@ -2319,12 +3031,11 @@ class TestAuth(test_v3.RestfulTestCase):
user_domain_name=self.domain['name'],
password=self.user['password'],
domain_name=self.domain['name'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidDomainScopedTokenResponse(r)
def test_domain_scope_token_with_group_role(self):
- group = self.new_group_ref(
- domain_id=self.domain_id)
+ group = unit.new_group_ref(domain_id=self.domain_id)
group = self.identity_api.create_group(group)
# add user to group
@@ -2340,7 +3051,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
domain_id=self.domain['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidDomainScopedTokenResponse(r)
def test_domain_scope_token_with_name(self):
@@ -2353,7 +3064,7 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
domain_name=self.domain['name'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidDomainScopedTokenResponse(r)
def test_domain_scope_failed(self):
@@ -2361,21 +3072,21 @@ class TestAuth(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
domain_id=self.domain['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_auth_with_id(self):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidUnscopedTokenResponse(r)
token = r.headers.get('X-Subject-Token')
# test token auth
auth_data = self.build_authentication_request(token=token)
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidUnscopedTokenResponse(r)
def get_v2_token(self, tenant_id=None):
@@ -2393,7 +3104,7 @@ class TestAuth(test_v3.RestfulTestCase):
def test_validate_v2_unscoped_token_with_v3_api(self):
v2_token = self.get_v2_token().result['access']['token']['id']
auth_data = self.build_authentication_request(token=v2_token)
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidUnscopedTokenResponse(r)
def test_validate_v2_scoped_token_with_v3_api(self):
@@ -2404,46 +3115,46 @@ class TestAuth(test_v3.RestfulTestCase):
auth_data = self.build_authentication_request(
token=v2_token,
project_id=self.default_domain_project['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidScopedTokenResponse(r)
def test_invalid_user_id(self):
auth_data = self.build_authentication_request(
user_id=uuid.uuid4().hex,
password=self.user['password'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_invalid_user_name(self):
auth_data = self.build_authentication_request(
username=uuid.uuid4().hex,
user_domain_id=self.domain['id'],
password=self.user['password'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_invalid_domain_id(self):
auth_data = self.build_authentication_request(
username=self.user['name'],
user_domain_id=uuid.uuid4().hex,
password=self.user['password'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_invalid_domain_name(self):
auth_data = self.build_authentication_request(
username=self.user['name'],
user_domain_name=uuid.uuid4().hex,
password=self.user['password'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_invalid_password(self):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=uuid.uuid4().hex)
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_remote_user_no_realm(self):
api = auth.controllers.Auth()
@@ -2524,7 +3235,7 @@ class TestAuth(test_v3.RestfulTestCase):
remote_user = self.default_domain_user['name']
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
'AUTH_TYPE': 'Negotiate'})
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = self.assertValidUnscopedTokenResponse(r)
self.assertNotIn('bind', token)
@@ -2551,7 +3262,7 @@ class TestAuth(test_v3.RestfulTestCase):
remote_user = self.default_domain_user['name']
self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
'AUTH_TYPE': 'Negotiate'})
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
# the unscoped token should have bind information in it
token = self.assertValidUnscopedTokenResponse(r)
@@ -2562,7 +3273,7 @@ class TestAuth(test_v3.RestfulTestCase):
# using unscoped token with remote user succeeds
auth_params = {'token': token, 'project_id': self.project_id}
auth_data = self.build_authentication_request(**auth_params)
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = self.assertValidProjectScopedTokenResponse(r)
# the bind information should be carried over from the original token
@@ -2601,16 +3312,16 @@ class TestAuth(test_v3.RestfulTestCase):
token_data['token']['bind'])
def test_authenticating_a_user_with_no_password(self):
- user = self.new_user_ref(domain_id=self.domain['id'])
- user.pop('password', None) # can't have a password for this test
+ user = unit.new_user_ref(domain_id=self.domain['id'])
+ del user['password'] # can't have a password for this test
user = self.identity_api.create_user(user)
auth_data = self.build_authentication_request(
user_id=user['id'],
password='password')
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_disabled_default_project_result_in_unscoped_token(self):
# create a disabled project to work with
@@ -2626,11 +3337,11 @@ class TestAuth(test_v3.RestfulTestCase):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidUnscopedTokenResponse(r)
def test_disabled_default_project_domain_result_in_unscoped_token(self):
- domain_ref = self.new_domain_ref()
+ domain_ref = unit.new_domain_ref()
r = self.post('/domains', body={'domain': domain_ref})
domain = self.assertValidDomainResponse(r, domain_ref)
@@ -2652,7 +3363,7 @@ class TestAuth(test_v3.RestfulTestCase):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidUnscopedTokenResponse(r)
def test_no_access_to_default_project_result_in_unscoped_token(self):
@@ -2664,32 +3375,35 @@ class TestAuth(test_v3.RestfulTestCase):
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
self.assertValidUnscopedTokenResponse(r)
def test_disabled_scope_project_domain_result_in_401(self):
# create a disabled domain
- domain = self.new_domain_ref()
- domain['enabled'] = False
- self.resource_api.create_domain(domain['id'], domain)
+ domain = unit.new_domain_ref()
+ domain = self.resource_api.create_domain(domain['id'], domain)
- # create a project in the disabled domain
- project = self.new_project_ref(domain_id=domain['id'])
+ # create a project in the domain
+ project = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project['id'], project)
- # assign some role to self.user for the project in the disabled domain
+ # assign some role to self.user for the project in the domain
self.assignment_api.add_role_to_user_and_project(
self.user['id'],
project['id'],
self.role_id)
+ # Disable the domain
+ domain['enabled'] = False
+ self.resource_api.update_domain(domain['id'], domain)
+
# user should not be able to auth with project_id
auth_data = self.build_authentication_request(
user_id=self.user['id'],
password=self.user['password'],
project_id=project['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
# user should not be able to auth with project_name & domain
auth_data = self.build_authentication_request(
@@ -2697,8 +3411,8 @@ class TestAuth(test_v3.RestfulTestCase):
password=self.user['password'],
project_name=project['name'],
project_domain_id=domain['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_auth_methods_with_different_identities_fails(self):
# get the token for a user. This is self.user which is different from
@@ -2710,8 +3424,124 @@ class TestAuth(test_v3.RestfulTestCase):
token=token,
user_id=self.default_domain_user['id'],
password=self.default_domain_user['password'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+
+ def test_authenticate_fails_if_project_unsafe(self):
+ """Verify authenticate to a project with unsafe name fails."""
+ # Start with url name restrictions off, so we can create the unsafe
+ # named project
+ self.config_fixture.config(group='resource',
+ project_name_url_safe='off')
+ unsafe_name = 'i am not / safe'
+ project = unit.new_project_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID,
+ name=unsafe_name)
+ self.resource_api.create_project(project['id'], project)
+ role_member = unit.new_role_ref()
+ self.role_api.create_role(role_member['id'], role_member)
+ self.assignment_api.add_role_to_user_and_project(
+ self.user['id'], project['id'], role_member['id'])
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_name=project['name'],
+ project_domain_id=test_v3.DEFAULT_DOMAIN_ID)
+
+    # Since name url restriction is off, we should be able to authenticate
+ self.v3_create_token(auth_data)
+
+ # Set the name url restriction to new, which should still allow us to
+ # authenticate
+ self.config_fixture.config(group='resource',
+ project_name_url_safe='new')
+ self.v3_create_token(auth_data)
+
+ # Set the name url restriction to strict and we should fail to
+ # authenticate
+ self.config_fixture.config(group='resource',
+ project_name_url_safe='strict')
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+
+ def test_authenticate_fails_if_domain_unsafe(self):
+ """Verify authenticate to a domain with unsafe name fails."""
+ # Start with url name restrictions off, so we can create the unsafe
+ # named domain
+ self.config_fixture.config(group='resource',
+ domain_name_url_safe='off')
+ unsafe_name = 'i am not / safe'
+ domain = unit.new_domain_ref(name=unsafe_name)
+ self.resource_api.create_domain(domain['id'], domain)
+ role_member = unit.new_role_ref()
+ self.role_api.create_role(role_member['id'], role_member)
+ self.assignment_api.create_grant(
+ role_member['id'],
+ user_id=self.user['id'],
+ domain_id=domain['id'])
+
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_name=domain['name'])
+
+    # Since name url restriction is off, we should be able to authenticate
+ self.v3_create_token(auth_data)
+
+ # Set the name url restriction to new, which should still allow us to
+ # authenticate
+ self.config_fixture.config(group='resource',
+ project_name_url_safe='new')
+ self.v3_create_token(auth_data)
+
+ # Set the name url restriction to strict and we should fail to
+ # authenticate
+ self.config_fixture.config(group='resource',
+ domain_name_url_safe='strict')
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+
+ def test_authenticate_fails_to_project_if_domain_unsafe(self):
+ """Verify authenticate to a project using unsafe domain name fails."""
+ # Start with url name restrictions off, so we can create the unsafe
+ # named domain
+ self.config_fixture.config(group='resource',
+ domain_name_url_safe='off')
+ unsafe_name = 'i am not / safe'
+ domain = unit.new_domain_ref(name=unsafe_name)
+ self.resource_api.create_domain(domain['id'], domain)
+ # Add a (safely named) project to that domain
+ project = unit.new_project_ref(domain_id=domain['id'])
+ self.resource_api.create_project(project['id'], project)
+ role_member = unit.new_role_ref()
+ self.role_api.create_role(role_member['id'], role_member)
+ self.assignment_api.create_grant(
+ role_member['id'],
+ user_id=self.user['id'],
+ project_id=project['id'])
+
+ # An auth request via project ID, but specifying domain by name
+ auth_data = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_name=project['name'],
+ project_domain_name=domain['name'])
+
+    # Since name url restriction is off, we should be able to authenticate
+ self.v3_create_token(auth_data)
+
+ # Set the name url restriction to new, which should still allow us to
+ # authenticate
+ self.config_fixture.config(group='resource',
+ project_name_url_safe='new')
+ self.v3_create_token(auth_data)
+
+ # Set the name url restriction to strict and we should fail to
+ # authenticate
+ self.config_fixture.config(group='resource',
+ domain_name_url_safe='strict')
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
class TestAuthJSONExternal(test_v3.RestfulTestCase):
@@ -2736,7 +3566,7 @@ class TestTrustOptional(test_v3.RestfulTestCase):
super(TestTrustOptional, self).config_overrides()
self.config_fixture.config(group='trust', enabled=False)
- def test_trusts_404(self):
+ def test_trusts_returns_not_found(self):
self.get('/OS-TRUST/trusts', body={'trust': {}},
expected_status=http_client.NOT_FOUND)
self.post('/OS-TRUST/trusts', body={'trust': {}},
@@ -2747,11 +3577,11 @@ class TestTrustOptional(test_v3.RestfulTestCase):
user_id=self.user['id'],
password=self.user['password'],
trust_id=uuid.uuid4().hex)
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.FORBIDDEN)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.FORBIDDEN)
-class TestTrustRedelegation(test_v3.RestfulTestCase):
+class TrustAPIBehavior(test_v3.RestfulTestCase):
"""Redelegation valid and secure
Redelegation is a hierarchical structure of trusts between initial trustor
@@ -2778,7 +3608,7 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
"""
def config_overrides(self):
- super(TestTrustRedelegation, self).config_overrides()
+ super(TrustAPIBehavior, self).config_overrides()
self.config_fixture.config(
group='trust',
enabled=True,
@@ -2787,14 +3617,13 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
)
def setUp(self):
- super(TestTrustRedelegation, self).setUp()
+ super(TrustAPIBehavior, self).setUp()
# Create a trustee to delegate stuff to
- trustee_user_ref = self.new_user_ref(domain_id=self.domain_id)
- self.trustee_user = self.identity_api.create_user(trustee_user_ref)
- self.trustee_user['password'] = trustee_user_ref['password']
+ self.trustee_user = unit.create_user(self.identity_api,
+ domain_id=self.domain_id)
# trustor->trustee
- self.redelegated_trust_ref = self.new_trust_ref(
+ self.redelegated_trust_ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
@@ -2804,7 +3633,7 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
allow_redelegation=True)
# trustor->trustee (no redelegation)
- self.chained_trust_ref = self.new_trust_ref(
+ self.chained_trust_ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
@@ -2865,7 +3694,7 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
# Attempt to create a redelegated trust supposed to last longer
# than the parent trust: let's give it 10 minutes (>1 minute).
- too_long_live_chained_trust_ref = self.new_trust_ref(
+ too_long_live_chained_trust_ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
@@ -2894,7 +3723,7 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
def test_roles_subset(self):
# Build second role
- role = self.new_role_ref()
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
# assign a new role to the user
self.assignment_api.create_grant(role_id=role['id'],
@@ -2903,6 +3732,8 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
# Create first trust with extended set of roles
ref = self.redelegated_trust_ref
+ ref['expires_at'] = datetime.datetime.utcnow().replace(
+ year=2032).strftime(unit.TIME_FORMAT)
ref['roles'].append({'id': role['id']})
r = self.post('/OS-TRUST/trusts',
body={'trust': ref})
@@ -2915,6 +3746,9 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
trust_token = self._get_trust_token(trust)
# Chain second trust with roles subset
+ self.chained_trust_ref['expires_at'] = (
+ datetime.datetime.utcnow().replace(year=2028).strftime(
+ unit.TIME_FORMAT))
r = self.post('/OS-TRUST/trusts',
body={'trust': self.chained_trust_ref},
token=trust_token)
@@ -2927,7 +3761,7 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
def test_redelegate_with_role_by_name(self):
# For role by name testing
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
@@ -2935,19 +3769,23 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
expires=dict(minutes=1),
role_names=[self.role['name']],
allow_redelegation=True)
+ ref['expires_at'] = datetime.datetime.utcnow().replace(
+ year=2032).strftime(unit.TIME_FORMAT)
r = self.post('/OS-TRUST/trusts',
body={'trust': ref})
trust = self.assertValidTrustResponse(r)
# Ensure we can get a token with this trust
trust_token = self._get_trust_token(trust)
# Chain second trust with roles subset
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=True,
role_names=[self.role['name']],
allow_redelegation=True)
+ ref['expires_at'] = datetime.datetime.utcnow().replace(
+ year=2028).strftime(unit.TIME_FORMAT)
r = self.post('/OS-TRUST/trusts',
body={'trust': ref},
token=trust_token)
@@ -2962,7 +3800,7 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
trust_token = self._get_trust_token(trust)
# Build second trust with a role not in parent's roles
- role = self.new_role_ref()
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
# assign a new role to the user
self.assignment_api.create_grant(role_id=role['id'],
@@ -2980,12 +3818,18 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
expected_status=http_client.FORBIDDEN)
def test_redelegation_terminator(self):
+ self.redelegated_trust_ref['expires_at'] = (
+ datetime.datetime.utcnow().replace(year=2032).strftime(
+ unit.TIME_FORMAT))
r = self.post('/OS-TRUST/trusts',
body={'trust': self.redelegated_trust_ref})
trust = self.assertValidTrustResponse(r)
trust_token = self._get_trust_token(trust)
# Build second trust - the terminator
+ self.chained_trust_ref['expires_at'] = (
+ datetime.datetime.utcnow().replace(year=2028).strftime(
+ unit.TIME_FORMAT))
ref = dict(self.chained_trust_ref,
redelegation_count=1,
allow_redelegation=False)
@@ -3007,215 +3851,64 @@ class TestTrustRedelegation(test_v3.RestfulTestCase):
token=trust_token,
expected_status=http_client.FORBIDDEN)
+ def test_redelegation_without_impersonation(self):
+ # Update trust to not allow impersonation
+ self.redelegated_trust_ref['impersonation'] = False
-class TestTrustChain(test_v3.RestfulTestCase):
-
- def config_overrides(self):
- super(TestTrustChain, self).config_overrides()
- self.config_fixture.config(
- group='trust',
- enabled=True,
- allow_redelegation=True,
- max_redelegation_count=10
- )
-
- def setUp(self):
- super(TestTrustChain, self).setUp()
- # Create trust chain
- self.user_chain = list()
- self.trust_chain = list()
- for _ in range(3):
- user_ref = self.new_user_ref(domain_id=self.domain_id)
- user = self.identity_api.create_user(user_ref)
- user['password'] = user_ref['password']
- self.user_chain.append(user)
-
- # trustor->trustee
- trustee = self.user_chain[0]
- trust_ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=trustee['id'],
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- trust_ref.update(
- allow_redelegation=True,
- redelegation_count=3)
-
- r = self.post('/OS-TRUST/trusts',
- body={'trust': trust_ref})
+ # Create trust
+ resp = self.post('/OS-TRUST/trusts',
+ body={'trust': self.redelegated_trust_ref},
+ expected_status=http_client.CREATED)
+ trust = self.assertValidTrustResponse(resp)
- trust = self.assertValidTrustResponse(r)
+ # Get trusted token without impersonation
auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password'],
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
trust_id=trust['id'])
trust_token = self.get_requested_token(auth_data)
- self.trust_chain.append(trust)
-
- for trustee in self.user_chain[1:]:
- trust_ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=trustee['id'],
- project_id=self.project_id,
- impersonation=True,
- role_ids=[self.role_id])
- trust_ref.update(
- allow_redelegation=True)
- r = self.post('/OS-TRUST/trusts',
- body={'trust': trust_ref},
- token=trust_token)
- trust = self.assertValidTrustResponse(r)
- auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password'],
- trust_id=trust['id'])
- trust_token = self.get_requested_token(auth_data)
- self.trust_chain.append(trust)
-
- trustee = self.user_chain[-1]
- trust = self.trust_chain[-1]
- auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password'],
- trust_id=trust['id'])
-
- self.last_token = self.get_requested_token(auth_data)
-
- def assert_user_authenticate(self, user):
- auth_data = self.build_authentication_request(
- user_id=user['id'],
- password=user['password']
- )
- r = self.v3_authenticate_token(auth_data)
- self.assertValidTokenResponse(r)
-
- def assert_trust_tokens_revoked(self, trust_id):
- trustee = self.user_chain[0]
- auth_data = self.build_authentication_request(
- user_id=trustee['id'],
- password=trustee['password']
- )
- r = self.v3_authenticate_token(auth_data)
- self.assertValidTokenResponse(r)
-
- revocation_response = self.get('/OS-REVOKE/events')
- revocation_events = revocation_response.json_body['events']
- found = False
- for event in revocation_events:
- if event.get('OS-TRUST:trust_id') == trust_id:
- found = True
- self.assertTrue(found, 'event with trust_id %s not found in list' %
- trust_id)
-
- def test_delete_trust_cascade(self):
- self.assert_user_authenticate(self.user_chain[0])
- self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': self.trust_chain[0]['id']},
- expected_status=204)
- headers = {'X-Subject-Token': self.last_token}
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.NOT_FOUND)
- self.assert_trust_tokens_revoked(self.trust_chain[0]['id'])
-
- def test_delete_broken_chain(self):
- self.assert_user_authenticate(self.user_chain[0])
- self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': self.trust_chain[1]['id']},
- expected_status=204)
-
- self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': self.trust_chain[0]['id']},
- expected_status=204)
-
- def test_trustor_roles_revoked(self):
- self.assert_user_authenticate(self.user_chain[0])
-
- self.assignment_api.remove_role_from_user_and_project(
- self.user_id, self.project_id, self.role_id
- )
-
- auth_data = self.build_authentication_request(
- token=self.last_token,
- trust_id=self.trust_chain[-1]['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.NOT_FOUND)
-
- def test_intermediate_user_disabled(self):
- self.assert_user_authenticate(self.user_chain[0])
-
- disabled = self.user_chain[0]
- disabled['enabled'] = False
- self.identity_api.update_user(disabled['id'], disabled)
-
- # Bypass policy enforcement
- with mock.patch.object(rules, 'enforce', return_value=True):
- headers = {'X-Subject-Token': self.last_token}
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.FORBIDDEN)
-
- def test_intermediate_user_deleted(self):
- self.assert_user_authenticate(self.user_chain[0])
-
- self.identity_api.delete_user(self.user_chain[0]['id'])
-
- # Bypass policy enforcement
- with mock.patch.object(rules, 'enforce', return_value=True):
- headers = {'X-Subject-Token': self.last_token}
- self.head('/auth/tokens', headers=headers,
- expected_status=http_client.FORBIDDEN)
-
-
-class TestTrustAuth(test_v3.RestfulTestCase):
- EXTENSION_NAME = 'revoke'
- EXTENSION_TO_ADD = 'revoke_extension'
-
- def config_overrides(self):
- super(TestTrustAuth, self).config_overrides()
- self.config_fixture.config(group='revoke', driver='kvs')
- self.config_fixture.config(
- group='token',
- provider='pki',
- revoke_by_id=False)
- self.config_fixture.config(group='trust', enabled=True)
+ # Create second user for redelegation
+ trustee_user_2 = unit.create_user(self.identity_api,
+ domain_id=self.domain_id)
- def setUp(self):
- super(TestTrustAuth, self).setUp()
-
- # create a trustee to delegate stuff to
- self.trustee_user = self.new_user_ref(domain_id=self.domain_id)
- password = self.trustee_user['password']
- self.trustee_user = self.identity_api.create_user(self.trustee_user)
- self.trustee_user['password'] = password
- self.trustee_user_id = self.trustee_user['id']
+ # Trust for redelegation
+ trust_ref_2 = unit.new_trust_ref(
+ trustor_user_id=self.trustee_user['id'],
+ trustee_user_id=trustee_user_2['id'],
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id],
+ allow_redelegation=False)
- def test_create_trust_bad_request(self):
- # The server returns a 403 Forbidden rather than a 400, see bug 1133435
- self.post('/OS-TRUST/trusts', body={'trust': {}},
- expected_status=http_client.FORBIDDEN)
+ # Creating a second trust should not be allowed since trustor does not
+ # have the role to delegate thus returning 404 NOT FOUND.
+ resp = self.post('/OS-TRUST/trusts',
+ body={'trust': trust_ref_2},
+ token=trust_token,
+ expected_status=http_client.NOT_FOUND)
def test_create_unscoped_trust(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id)
+ trustee_user_id=self.trustee_user['id'])
r = self.post('/OS-TRUST/trusts', body={'trust': ref})
self.assertValidTrustResponse(r, ref)
def test_create_trust_no_roles(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id)
self.post('/OS-TRUST/trusts', body={'trust': ref},
expected_status=http_client.FORBIDDEN)
def _initialize_test_consume_trust(self, count):
# Make sure remaining_uses is decremented as we consume the trust
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
remaining_uses=count,
role_ids=[self.role_id])
@@ -3223,30 +3916,29 @@ class TestTrustAuth(test_v3.RestfulTestCase):
# make sure the trust exists
trust = self.assertValidTrustResponse(r, ref)
r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- expected_status=200)
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
# get a token for the trustee
auth_data = self.build_authentication_request(
user_id=self.trustee_user['id'],
password=self.trustee_user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = r.headers.get('X-Subject-Token')
# get a trust token, consume one use
auth_data = self.build_authentication_request(
token=token,
trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
return trust
def test_consume_trust_once(self):
trust = self._initialize_test_consume_trust(2)
# check decremented value
r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- expected_status=200)
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
trust = r.result.get('trust')
self.assertIsNotNone(trust)
self.assertEqual(1, trust['remaining_uses'])
+ # FIXME(lbragstad): Assert the role that is returned is the right role.
def test_create_one_time_use_trust(self):
trust = self._initialize_test_consume_trust(1)
@@ -3259,61 +3951,15 @@ class TestTrustAuth(test_v3.RestfulTestCase):
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_create_trust_with_bad_values_for_remaining_uses(self):
- # negative values for the remaining_uses parameter are forbidden
- self._create_trust_with_bad_remaining_use(bad_value=-1)
- # 0 is a forbidden value as well
- self._create_trust_with_bad_remaining_use(bad_value=0)
- # as are non integer values
- self._create_trust_with_bad_remaining_use(bad_value="a bad value")
- self._create_trust_with_bad_remaining_use(bad_value=7.2)
-
- def _create_trust_with_bad_remaining_use(self, bad_value):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- remaining_uses=bad_value,
- role_ids=[self.role_id])
- self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_invalid_trust_request_without_impersonation(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_ids=[self.role_id])
-
- del ref['impersonation']
-
- self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- expected_status=http_client.BAD_REQUEST)
-
- def test_invalid_trust_request_without_trustee(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_ids=[self.role_id])
-
- del ref['trustee_user_id']
-
- self.post('/OS-TRUST/trusts',
- body={'trust': ref},
- expected_status=http_client.BAD_REQUEST)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_create_unlimited_use_trust(self):
# by default trusts are unlimited in terms of tokens that can be
# generated from them, this test creates such a trust explicitly
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
remaining_uses=None,
role_ids=[self.role_id])
@@ -3321,322 +3967,25 @@ class TestTrustAuth(test_v3.RestfulTestCase):
trust = self.assertValidTrustResponse(r, ref)
r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- expected_status=200)
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
auth_data = self.build_authentication_request(
user_id=self.trustee_user['id'],
password=self.trustee_user['password'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
token = r.headers.get('X-Subject-Token')
auth_data = self.build_authentication_request(
token=token,
trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- expected_status=200)
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
trust = r.result.get('trust')
self.assertIsNone(trust['remaining_uses'])
- def test_trust_crud(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_ids=[self.role_id])
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r, ref)
-
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- expected_status=200)
- self.assertValidTrustResponse(r, ref)
-
- # validate roles on the trust
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s/roles' % {
- 'trust_id': trust['id']},
- expected_status=200)
- roles = self.assertValidRoleListResponse(r, self.role)
- self.assertIn(self.role['id'], [x['id'] for x in roles])
- self.head(
- '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
- 'trust_id': trust['id'],
- 'role_id': self.role['id']},
- expected_status=200)
- r = self.get(
- '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
- 'trust_id': trust['id'],
- 'role_id': self.role['id']},
- expected_status=200)
- self.assertValidRoleResponse(r, self.role)
-
- r = self.get('/OS-TRUST/trusts', expected_status=200)
- self.assertValidTrustListResponse(r, trust)
-
- # trusts are immutable
- self.patch(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- self.delete(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- expected_status=204)
-
- self.get(
- '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_trust_trustee_404(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=uuid.uuid4().hex,
- project_id=self.project_id,
- role_ids=[self.role_id])
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_trust_trustor_trustee_backwards(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.trustee_user_id,
- trustee_user_id=self.user_id,
- project_id=self.project_id,
- role_ids=[self.role_id])
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.FORBIDDEN)
-
- def test_create_trust_project_404(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=uuid.uuid4().hex,
- role_ids=[self.role_id])
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_trust_role_id_404(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_ids=[uuid.uuid4().hex])
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- def test_create_trust_role_name_404(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- role_names=[uuid.uuid4().hex])
- self.post('/OS-TRUST/trusts', body={'trust': ref},
- expected_status=http_client.NOT_FOUND)
-
- def test_v3_v2_intermix_trustor_not_in_default_domain_failed(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.default_domain_user_id,
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
- self.assertValidProjectTrustScopedTokenResponse(
- r, self.default_domain_user)
-
- token = r.headers.get('X-Subject-Token')
-
- # now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(
- path=path, token=CONF.admin_token,
- method='GET', expected_status=http_client.UNAUTHORIZED)
-
- def test_v3_v2_intermix_trustor_not_in_default_domaini_failed(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.default_domain_user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.default_domain_project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project_id)
- token = self.get_requested_token(auth_data)
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
- self.assertValidProjectTrustScopedTokenResponse(
- r, self.trustee_user)
- token = r.headers.get('X-Subject-Token')
-
- # now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(
- path=path, token=CONF.admin_token,
- method='GET', expected_status=http_client.UNAUTHORIZED)
-
- def test_v3_v2_intermix_project_not_in_default_domaini_failed(self):
- # create a trustee in default domain to delegate stuff to
- trustee_user = self.new_user_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID)
- password = trustee_user['password']
- trustee_user = self.identity_api.create_user(trustee_user)
- trustee_user['password'] = password
- trustee_user_id = trustee_user['id']
-
- ref = self.new_trust_ref(
- trustor_user_id=self.default_domain_user_id,
- trustee_user_id=trustee_user_id,
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project_id)
- token = self.get_requested_token(auth_data)
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=trustee_user['id'],
- password=trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
- self.assertValidProjectTrustScopedTokenResponse(
- r, trustee_user)
- token = r.headers.get('X-Subject-Token')
-
- # now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(
- path=path, token=CONF.admin_token,
- method='GET', expected_status=http_client.UNAUTHORIZED)
-
- def test_v3_v2_intermix(self):
- # create a trustee in default domain to delegate stuff to
- trustee_user = self.new_user_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID)
- password = trustee_user['password']
- trustee_user = self.identity_api.create_user(trustee_user)
- trustee_user['password'] = password
- trustee_user_id = trustee_user['id']
-
- ref = self.new_trust_ref(
- trustor_user_id=self.default_domain_user_id,
- trustee_user_id=trustee_user_id,
- project_id=self.default_domain_project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
- auth_data = self.build_authentication_request(
- user_id=self.default_domain_user['id'],
- password=self.default_domain_user['password'],
- project_id=self.default_domain_project_id)
- token = self.get_requested_token(auth_data)
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=trustee_user['id'],
- password=trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
- self.assertValidProjectTrustScopedTokenResponse(
- r, trustee_user)
- token = r.headers.get('X-Subject-Token')
-
- # now validate the v3 token with v2 API
- path = '/v2.0/tokens/%s' % (token)
- self.admin_request(
- path=path, token=CONF.admin_token,
- method='GET', expected_status=200)
-
- def test_exercise_trust_scoped_token_without_impersonation(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
- self.assertValidProjectTrustScopedTokenResponse(r, self.trustee_user)
- self.assertEqual(self.trustee_user['id'],
- r.result['token']['user']['id'])
- self.assertEqual(self.trustee_user['name'],
- r.result['token']['user']['name'])
- self.assertEqual(self.domain['id'],
- r.result['token']['user']['domain']['id'])
- self.assertEqual(self.domain['name'],
- r.result['token']['user']['domain']['name'])
- self.assertEqual(self.project['id'],
- r.result['token']['project']['id'])
- self.assertEqual(self.project['name'],
- r.result['token']['project']['name'])
-
- def test_exercise_trust_scoped_token_with_impersonation(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- impersonation=True,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'],
- trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
- self.assertValidProjectTrustScopedTokenResponse(r, self.user)
- self.assertEqual(self.user['id'], r.result['token']['user']['id'])
- self.assertEqual(self.user['name'], r.result['token']['user']['name'])
- self.assertEqual(self.domain['id'],
- r.result['token']['user']['domain']['id'])
- self.assertEqual(self.domain['name'],
- r.result['token']['user']['domain']['name'])
- self.assertEqual(self.project['id'],
- r.result['token']['project']['id'])
- self.assertEqual(self.project['name'],
- r.result['token']['project']['name'])
-
def test_impersonation_token_cannot_create_new_trust(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=True,
expires=dict(minutes=1),
@@ -3653,9 +4002,9 @@ class TestTrustAuth(test_v3.RestfulTestCase):
trust_token = self.get_requested_token(auth_data)
# Build second trust
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=True,
expires=dict(minutes=1),
@@ -3668,7 +4017,7 @@ class TestTrustAuth(test_v3.RestfulTestCase):
def test_trust_deleted_grant(self):
# create a new role
- role = self.new_role_ref()
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
grant_url = (
@@ -3682,9 +4031,9 @@ class TestTrustAuth(test_v3.RestfulTestCase):
self.put(grant_url)
# create a trust that delegates the new role
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=False,
expires=dict(minutes=1),
@@ -3702,8 +4051,8 @@ class TestTrustAuth(test_v3.RestfulTestCase):
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data,
- expected_status=http_client.FORBIDDEN)
+ r = self.v3_create_token(auth_data,
+ expected_status=http_client.FORBIDDEN)
def test_trust_chained(self):
"""Test that a trust token can't be used to execute another trust.
@@ -3713,28 +4062,26 @@ class TestTrustAuth(test_v3.RestfulTestCase):
"""
# create a sub-trustee user
- sub_trustee_user = self.new_user_ref(
+ sub_trustee_user = unit.create_user(
+ self.identity_api,
domain_id=test_v3.DEFAULT_DOMAIN_ID)
- password = sub_trustee_user['password']
- sub_trustee_user = self.identity_api.create_user(sub_trustee_user)
- sub_trustee_user['password'] = password
sub_trustee_user_id = sub_trustee_user['id']
# create a new role
- role = self.new_role_ref()
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
# assign the new role to trustee
self.put(
'/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % {
'project_id': self.project_id,
- 'user_id': self.trustee_user_id,
+ 'user_id': self.trustee_user['id'],
'role_id': role['id']})
# create a trust from trustor -> trustee
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=True,
expires=dict(minutes=1),
@@ -3744,14 +4091,14 @@ class TestTrustAuth(test_v3.RestfulTestCase):
# authenticate as trustee so we can create a second trust
auth_data = self.build_authentication_request(
- user_id=self.trustee_user_id,
+ user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
project_id=self.project_id)
token = self.get_requested_token(auth_data)
# create a trust from trustee -> sub-trustee
- ref = self.new_trust_ref(
- trustor_user_id=self.trustee_user_id,
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.trustee_user['id'],
trustee_user_id=sub_trustee_user_id,
project_id=self.project_id,
impersonation=True,
@@ -3771,12 +4118,11 @@ class TestTrustAuth(test_v3.RestfulTestCase):
auth_data = self.build_authentication_request(
token=trust_token,
trust_id=trust1['id'])
- r = self.v3_authenticate_token(auth_data,
- expected_status=http_client.FORBIDDEN)
+ r = self.v3_create_token(auth_data,
+ expected_status=http_client.FORBIDDEN)
def assertTrustTokensRevoked(self, trust_id):
- revocation_response = self.get('/OS-REVOKE/events',
- expected_status=200)
+ revocation_response = self.get('/OS-REVOKE/events')
revocation_events = revocation_response.json_body['events']
found = False
for event in revocation_events:
@@ -3786,9 +4132,9 @@ class TestTrustAuth(test_v3.RestfulTestCase):
trust_id)
def test_delete_trust_revokes_tokens(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=False,
expires=dict(minutes=1),
@@ -3800,13 +4146,12 @@ class TestTrustAuth(test_v3.RestfulTestCase):
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust_id)
- r = self.v3_authenticate_token(auth_data)
- self.assertValidProjectTrustScopedTokenResponse(
+ r = self.v3_create_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(
r, self.trustee_user)
trust_token = r.headers['X-Subject-Token']
self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': trust_id},
- expected_status=204)
+ 'trust_id': trust_id})
headers = {'X-Subject-Token': trust_token}
self.head('/auth/tokens', headers=headers,
expected_status=http_client.NOT_FOUND)
@@ -3817,9 +4162,9 @@ class TestTrustAuth(test_v3.RestfulTestCase):
self.identity_api.update_user(user['id'], user)
def test_trust_get_token_fails_if_trustor_disabled(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=False,
expires=dict(minutes=1),
@@ -3833,7 +4178,7 @@ class TestTrustAuth(test_v3.RestfulTestCase):
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust['id'])
- self.v3_authenticate_token(auth_data, expected_status=201)
+ self.v3_create_token(auth_data)
self.disable_user(self.user)
@@ -3841,13 +4186,13 @@ class TestTrustAuth(test_v3.RestfulTestCase):
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.FORBIDDEN)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.FORBIDDEN)
def test_trust_get_token_fails_if_trustee_disabled(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=False,
expires=dict(minutes=1),
@@ -3861,7 +4206,7 @@ class TestTrustAuth(test_v3.RestfulTestCase):
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust['id'])
- self.v3_authenticate_token(auth_data, expected_status=201)
+ self.v3_create_token(auth_data)
self.disable_user(self.trustee_user)
@@ -3869,13 +4214,13 @@ class TestTrustAuth(test_v3.RestfulTestCase):
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_delete_trust(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=False,
expires=dict(minutes=1),
@@ -3886,57 +4231,19 @@ class TestTrustAuth(test_v3.RestfulTestCase):
trust = self.assertValidTrustResponse(r, ref)
self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': trust['id']},
- expected_status=204)
-
- self.get('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': trust['id']},
- expected_status=http_client.NOT_FOUND)
-
- self.get('/OS-TRUST/trusts/%(trust_id)s' % {
- 'trust_id': trust['id']},
- expected_status=http_client.NOT_FOUND)
+ 'trust_id': trust['id']})
auth_data = self.build_authentication_request(
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust['id'])
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.UNAUTHORIZED)
-
- def test_list_trusts(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- impersonation=False,
- expires=dict(minutes=1),
- role_ids=[self.role_id])
-
- for i in range(3):
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- self.assertValidTrustResponse(r, ref)
-
- r = self.get('/OS-TRUST/trusts', expected_status=200)
- trusts = r.result['trusts']
- self.assertEqual(3, len(trusts))
- self.assertValidTrustListResponse(r)
-
- r = self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
- self.user_id, expected_status=200)
- trusts = r.result['trusts']
- self.assertEqual(3, len(trusts))
- self.assertValidTrustListResponse(r)
-
- r = self.get('/OS-TRUST/trusts?trustee_user_id=%s' %
- self.user_id, expected_status=200)
- trusts = r.result['trusts']
- self.assertEqual(0, len(trusts))
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
def test_change_password_invalidates_trust_tokens(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=True,
expires=dict(minutes=1),
@@ -3949,64 +4256,52 @@ class TestTrustAuth(test_v3.RestfulTestCase):
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
+ r = self.v3_create_token(auth_data)
- self.assertValidProjectTrustScopedTokenResponse(r, self.user)
+ self.assertValidProjectScopedTokenResponse(r, self.user)
trust_token = r.headers.get('X-Subject-Token')
self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
- self.user_id, expected_status=200,
- token=trust_token)
+ self.user_id, token=trust_token)
self.assertValidUserResponse(
self.patch('/users/%s' % self.trustee_user['id'],
- body={'user': {'password': uuid.uuid4().hex}},
- expected_status=200))
+ body={'user': {'password': uuid.uuid4().hex}}))
self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
self.user_id, expected_status=http_client.UNAUTHORIZED,
token=trust_token)
def test_trustee_can_do_role_ops(self):
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
- project_id=self.project_id,
- impersonation=True,
- role_ids=[self.role_id])
-
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
-
- auth_data = self.build_authentication_request(
- user_id=self.trustee_user['id'],
- password=self.trustee_user['password'])
+ resp = self.post('/OS-TRUST/trusts',
+ body={'trust': self.redelegated_trust_ref})
+ trust = self.assertValidTrustResponse(resp)
+ trust_token = self._get_trust_token(trust)
- r = self.get(
+ resp = self.get(
'/OS-TRUST/trusts/%(trust_id)s/roles' % {
'trust_id': trust['id']},
- auth=auth_data)
- self.assertValidRoleListResponse(r, self.role)
+ token=trust_token)
+ self.assertValidRoleListResponse(resp, self.role)
self.head(
'/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
'trust_id': trust['id'],
'role_id': self.role['id']},
- auth=auth_data,
- expected_status=200)
+ token=trust_token,
+ expected_status=http_client.OK)
- r = self.get(
+ resp = self.get(
'/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
'trust_id': trust['id'],
'role_id': self.role['id']},
- auth=auth_data,
- expected_status=200)
- self.assertValidRoleResponse(r, self.role)
+ token=trust_token)
+ self.assertValidRoleResponse(resp, self.role)
def test_do_not_consume_remaining_uses_when_get_token_fails(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
- trustee_user_id=self.trustee_user_id,
+ trustee_user_id=self.trustee_user['id'],
project_id=self.project_id,
impersonation=False,
expires=dict(minutes=1),
@@ -4023,13 +4318,209 @@ class TestTrustAuth(test_v3.RestfulTestCase):
user_id=self.default_domain_user['id'],
password=self.default_domain_user['password'],
trust_id=trust_id)
- self.v3_authenticate_token(auth_data,
- expected_status=http_client.FORBIDDEN)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.FORBIDDEN)
r = self.get('/OS-TRUST/trusts/%s' % trust_id)
self.assertEqual(3, r.result.get('trust').get('remaining_uses'))
+class TestTrustChain(test_v3.RestfulTestCase):
+
+ def config_overrides(self):
+ super(TestTrustChain, self).config_overrides()
+ self.config_fixture.config(
+ group='trust',
+ enabled=True,
+ allow_redelegation=True,
+ max_redelegation_count=10
+ )
+
+ def setUp(self):
+ super(TestTrustChain, self).setUp()
+ """Create a trust chain using redelegation.
+
+ A trust chain is a series of trusts that are redelegated. For example,
+ self.user_list consists of userA, userB, and userC. The first trust in
+ the trust chain is going to be established between self.user and userA,
+ call it trustA. Then, userA is going to obtain a trust scoped token
+ using trustA, and with that token create a trust between userA and
+ userB called trustB. This pattern will continue with userB creating a
+ trust with userC.
+ So the trust chain should look something like:
+ trustA -> trustB -> trustC
+ Where:
+ self.user is trusting userA with trustA
+ userA is trusting userB with trustB
+ userB is trusting userC with trustC
+
+ """
+ self.user_list = list()
+ self.trust_chain = list()
+ for _ in range(3):
+ user = unit.create_user(self.identity_api,
+ domain_id=self.domain_id)
+ self.user_list.append(user)
+
+ # trustor->trustee redelegation with impersonation
+ trustee = self.user_list[0]
+ trust_ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=trustee['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id],
+ allow_redelegation=True,
+ redelegation_count=3)
+
+ # Create a trust between self.user and the first user in the list
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': trust_ref})
+
+ trust = self.assertValidTrustResponse(r)
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password'],
+ trust_id=trust['id'])
+
+ # Generate a trusted token for the first user
+ trust_token = self.get_requested_token(auth_data)
+ self.trust_chain.append(trust)
+
+ # Loop through the user to create a chain of redelegated trust.
+ for next_trustee in self.user_list[1:]:
+ trust_ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=next_trustee['id'],
+ project_id=self.project_id,
+ impersonation=True,
+ role_ids=[self.role_id],
+ allow_redelegation=True)
+ r = self.post('/OS-TRUST/trusts',
+ body={'trust': trust_ref},
+ token=trust_token)
+ trust = self.assertValidTrustResponse(r)
+ auth_data = self.build_authentication_request(
+ user_id=next_trustee['id'],
+ password=next_trustee['password'],
+ trust_id=trust['id'])
+ trust_token = self.get_requested_token(auth_data)
+ self.trust_chain.append(trust)
+
+ trustee = self.user_list[-1]
+ trust = self.trust_chain[-1]
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password'],
+ trust_id=trust['id'])
+
+ self.last_token = self.get_requested_token(auth_data)
+
+ def assert_user_authenticate(self, user):
+ auth_data = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password']
+ )
+ r = self.v3_create_token(auth_data)
+ self.assertValidTokenResponse(r)
+
+ def assert_trust_tokens_revoked(self, trust_id):
+ trustee = self.user_list[0]
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password']
+ )
+ r = self.v3_create_token(auth_data)
+ self.assertValidTokenResponse(r)
+
+ revocation_response = self.get('/OS-REVOKE/events')
+ revocation_events = revocation_response.json_body['events']
+ found = False
+ for event in revocation_events:
+ if event.get('OS-TRUST:trust_id') == trust_id:
+ found = True
+ self.assertTrue(found, 'event with trust_id %s not found in list' %
+ trust_id)
+
+ def test_delete_trust_cascade(self):
+ self.assert_user_authenticate(self.user_list[0])
+ self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': self.trust_chain[0]['id']})
+
+ headers = {'X-Subject-Token': self.last_token}
+ self.head('/auth/tokens', headers=headers,
+ expected_status=http_client.NOT_FOUND)
+ self.assert_trust_tokens_revoked(self.trust_chain[0]['id'])
+
+ def test_delete_broken_chain(self):
+ self.assert_user_authenticate(self.user_list[0])
+ self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': self.trust_chain[0]['id']})
+
+ # Verify the two remaining trust have been deleted
+ for i in range(len(self.user_list) - 1):
+ auth_data = self.build_authentication_request(
+ user_id=self.user_list[i]['id'],
+ password=self.user_list[i]['password'])
+
+ auth_token = self.get_requested_token(auth_data)
+
+ # Assert chained trust have been deleted
+ self.get('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': self.trust_chain[i + 1]['id']},
+ token=auth_token,
+ expected_status=http_client.NOT_FOUND)
+
+ def test_trustor_roles_revoked(self):
+ self.assert_user_authenticate(self.user_list[0])
+
+ self.assignment_api.remove_role_from_user_and_project(
+ self.user_id, self.project_id, self.role_id
+ )
+
+ # Verify that users are not allowed to authenticate with trust
+ for i in range(len(self.user_list[1:])):
+ trustee = self.user_list[i]
+ auth_data = self.build_authentication_request(
+ user_id=trustee['id'],
+ password=trustee['password'])
+
+ # Attempt to authenticate with trust
+ token = self.get_requested_token(auth_data)
+ auth_data = self.build_authentication_request(
+ token=token,
+ trust_id=self.trust_chain[i - 1]['id'])
+
+ # Trustee has no delegated roles
+ self.v3_create_token(auth_data,
+ expected_status=http_client.FORBIDDEN)
+
+ def test_intermediate_user_disabled(self):
+ self.assert_user_authenticate(self.user_list[0])
+
+ disabled = self.user_list[0]
+ disabled['enabled'] = False
+ self.identity_api.update_user(disabled['id'], disabled)
+
+ # Bypass policy enforcement
+ with mock.patch.object(rules, 'enforce', return_value=True):
+ headers = {'X-Subject-Token': self.last_token}
+ self.head('/auth/tokens', headers=headers,
+ expected_status=http_client.FORBIDDEN)
+
+ def test_intermediate_user_deleted(self):
+ self.assert_user_authenticate(self.user_list[0])
+
+ self.identity_api.delete_user(self.user_list[0]['id'])
+
+ # Bypass policy enforcement
+ with mock.patch.object(rules, 'enforce', return_value=True):
+ headers = {'X-Subject-Token': self.last_token}
+ self.head('/auth/tokens', headers=headers,
+ expected_status=http_client.FORBIDDEN)
+
+
class TestAPIProtectionWithoutAuthContextMiddleware(test_v3.RestfulTestCase):
def test_api_protection_with_no_auth_context_in_env(self):
auth_data = self.build_authentication_request(
@@ -4045,7 +4536,7 @@ class TestAPIProtectionWithoutAuthContextMiddleware(test_v3.RestfulTestCase):
'query_string': {},
'environment': {}}
r = auth_controller.validate_token(context)
- self.assertEqual(200, r.status_code)
+ self.assertEqual(http_client.OK, r.status_code)
class TestAuthContext(unit.TestCase):
@@ -4105,9 +4596,7 @@ class TestAuthSpecificData(test_v3.RestfulTestCase):
def test_get_catalog_project_scoped_token(self):
"""Call ``GET /auth/catalog`` with a project-scoped token."""
- r = self.get(
- '/auth/catalog',
- expected_status=200)
+ r = self.get('/auth/catalog')
self.assertValidCatalogResponse(r)
def test_get_catalog_domain_scoped_token(self):
@@ -4141,7 +4630,7 @@ class TestAuthSpecificData(test_v3.RestfulTestCase):
expected_status=http_client.UNAUTHORIZED)
def test_get_projects_project_scoped_token(self):
- r = self.get('/auth/projects', expected_status=200)
+ r = self.get('/auth/projects')
self.assertThat(r.json['projects'], matchers.HasLength(1))
self.assertValidProjectListResponse(r)
@@ -4149,452 +4638,318 @@ class TestAuthSpecificData(test_v3.RestfulTestCase):
self.put(path='/domains/%s/users/%s/roles/%s' % (
self.domain['id'], self.user['id'], self.role['id']))
- r = self.get('/auth/domains', expected_status=200)
+ r = self.get('/auth/domains')
self.assertThat(r.json['domains'], matchers.HasLength(1))
self.assertValidDomainListResponse(r)
-class TestFernetTokenProvider(test_v3.RestfulTestCase):
- def setUp(self):
- super(TestFernetTokenProvider, self).setUp()
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
-
- def _make_auth_request(self, auth_data):
- resp = self.post('/auth/tokens', body=auth_data, expected_status=201)
- token = resp.headers.get('X-Subject-Token')
- self.assertLess(len(token), 255)
- return token
-
- def _get_unscoped_token(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'])
- return self._make_auth_request(auth_data)
-
- def _get_project_scoped_token(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- project_id=self.project_id)
- return self._make_auth_request(auth_data)
+class TestTrustAuthPKITokenProvider(TrustAPIBehavior, TestTrustChain):
+ def config_overrides(self):
+ super(TestTrustAuthPKITokenProvider, self).config_overrides()
+ self.config_fixture.config(group='token',
+ provider='pki',
+ revoke_by_id=False)
+ self.config_fixture.config(group='trust',
+ enabled=True)
- def _get_domain_scoped_token(self):
- auth_data = self.build_authentication_request(
- user_id=self.user['id'],
- password=self.user['password'],
- domain_id=self.domain_id)
- return self._make_auth_request(auth_data)
- def _get_trust_scoped_token(self, trustee_user, trust):
- auth_data = self.build_authentication_request(
- user_id=trustee_user['id'],
- password=trustee_user['password'],
- trust_id=trust['id'])
- return self._make_auth_request(auth_data)
-
- def _validate_token(self, token, expected_status=200):
- return self.get(
- '/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=expected_status)
+class TestTrustAuthPKIZTokenProvider(TrustAPIBehavior, TestTrustChain):
+ def config_overrides(self):
+ super(TestTrustAuthPKIZTokenProvider, self).config_overrides()
+ self.config_fixture.config(group='token',
+ provider='pkiz',
+ revoke_by_id=False)
+ self.config_fixture.config(group='trust',
+ enabled=True)
- def _revoke_token(self, token, expected_status=204):
- return self.delete(
- '/auth/tokens',
- headers={'X-Subject-Token': token},
- expected_status=expected_status)
- def _set_user_enabled(self, user, enabled=True):
- user['enabled'] = enabled
- self.identity_api.update_user(user['id'], user)
+class TestTrustAuthFernetTokenProvider(TrustAPIBehavior, TestTrustChain):
+ def config_overrides(self):
+ super(TestTrustAuthFernetTokenProvider, self).config_overrides()
+ self.config_fixture.config(group='token',
+ provider='fernet',
+ revoke_by_id=False)
+ self.config_fixture.config(group='trust',
+ enabled=True)
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
- def _create_trust(self):
- # Create a trustee user
- trustee_user_ref = self.new_user_ref(domain_id=self.domain_id)
- trustee_user = self.identity_api.create_user(trustee_user_ref)
- trustee_user['password'] = trustee_user_ref['password']
- ref = self.new_trust_ref(
- trustor_user_id=self.user_id,
- trustee_user_id=trustee_user['id'],
- project_id=self.project_id,
- impersonation=False,
- role_ids=[self.role_id])
- # Create a trust
- r = self.post('/OS-TRUST/trusts', body={'trust': ref})
- trust = self.assertValidTrustResponse(r)
- return (trustee_user, trust)
+class TestAuthFernetTokenProvider(TestAuth):
+ def setUp(self):
+ super(TestAuthFernetTokenProvider, self).setUp()
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
def config_overrides(self):
- super(TestFernetTokenProvider, self).config_overrides()
+ super(TestAuthFernetTokenProvider, self).config_overrides()
self.config_fixture.config(group='token', provider='fernet')
- def test_validate_unscoped_token(self):
- unscoped_token = self._get_unscoped_token()
- self._validate_token(unscoped_token)
+ def test_verify_with_bound_token(self):
+ self.config_fixture.config(group='token', bind='kerberos')
+ auth_data = self.build_authentication_request(
+ project_id=self.project['id'])
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+        # Bind not currently supported by Fernet, see bug 1433311.
+ self.v3_create_token(auth_data,
+ expected_status=http_client.NOT_IMPLEMENTED)
- def test_validate_tampered_unscoped_token_fails(self):
- unscoped_token = self._get_unscoped_token()
- tampered_token = (unscoped_token[:50] + uuid.uuid4().hex +
- unscoped_token[50 + 32:])
- self._validate_token(tampered_token,
- expected_status=http_client.NOT_FOUND)
+ def test_v2_v3_bind_token_intermix(self):
+ self.config_fixture.config(group='token', bind='kerberos')
- def test_revoke_unscoped_token(self):
- unscoped_token = self._get_unscoped_token()
- self._validate_token(unscoped_token)
- self._revoke_token(unscoped_token)
- self._validate_token(unscoped_token,
- expected_status=http_client.NOT_FOUND)
+ # we need our own user registered to the default domain because of
+ # the way external auth works.
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+ body = {'auth': {}}
+        # Bind not currently supported by Fernet, see bug 1433311.
+ self.admin_request(path='/v2.0/tokens',
+ method='POST',
+ body=body,
+ expected_status=http_client.NOT_IMPLEMENTED)
- def test_unscoped_token_is_invalid_after_disabling_user(self):
- unscoped_token = self._get_unscoped_token()
- # Make sure the token is valid
- self._validate_token(unscoped_token)
- # Disable the user
- self._set_user_enabled(self.user, enabled=False)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
+ def test_auth_with_bind_token(self):
+ self.config_fixture.config(group='token', bind=['kerberos'])
- def test_unscoped_token_is_invalid_after_enabling_disabled_user(self):
- unscoped_token = self._get_unscoped_token()
- # Make sure the token is valid
- self._validate_token(unscoped_token)
- # Disable the user
- self._set_user_enabled(self.user, enabled=False)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
- # Enable the user
- self._set_user_enabled(self.user)
- # Ensure validating a token for a re-enabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
+ auth_data = self.build_authentication_request()
+ remote_user = self.default_domain_user['name']
+ self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
+ 'AUTH_TYPE': 'Negotiate'})
+        # Bind not currently supported by Fernet, see bug 1433311.
+ self.v3_create_token(auth_data,
+ expected_status=http_client.NOT_IMPLEMENTED)
- def test_unscoped_token_is_invalid_after_disabling_user_domain(self):
- unscoped_token = self._get_unscoped_token()
- # Make sure the token is valid
- self._validate_token(unscoped_token)
- # Disable the user's domain
- self.domain['enabled'] = False
- self.resource_api.update_domain(self.domain['id'], self.domain)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
- def test_unscoped_token_is_invalid_after_changing_user_password(self):
- unscoped_token = self._get_unscoped_token()
- # Make sure the token is valid
- self._validate_token(unscoped_token)
- # Change user's password
- self.user['password'] = 'Password1'
- self.identity_api.update_user(self.user['id'], self.user)
- # Ensure updating user's password revokes existing user's tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- unscoped_token)
+class TestAuthTOTP(test_v3.RestfulTestCase):
- def test_validate_project_scoped_token(self):
- project_scoped_token = self._get_project_scoped_token()
- self._validate_token(project_scoped_token)
+ def setUp(self):
+ super(TestAuthTOTP, self).setUp()
- def test_validate_domain_scoped_token(self):
- # Grant user access to domain
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- domain_scoped_token = self._get_domain_scoped_token()
- resp = self._validate_token(domain_scoped_token)
- resp_json = json.loads(resp.body)
- self.assertIsNotNone(resp_json['token']['catalog'])
- self.assertIsNotNone(resp_json['token']['roles'])
- self.assertIsNotNone(resp_json['token']['domain'])
+ ref = unit.new_totp_credential(
+ user_id=self.default_domain_user['id'],
+ project_id=self.default_domain_project['id'])
- def test_validate_tampered_project_scoped_token_fails(self):
- project_scoped_token = self._get_project_scoped_token()
- tampered_token = (project_scoped_token[:50] + uuid.uuid4().hex +
- project_scoped_token[50 + 32:])
- self._validate_token(tampered_token,
- expected_status=http_client.NOT_FOUND)
+ self.secret = ref['blob']
- def test_revoke_project_scoped_token(self):
- project_scoped_token = self._get_project_scoped_token()
- self._validate_token(project_scoped_token)
- self._revoke_token(project_scoped_token)
- self._validate_token(project_scoped_token,
- expected_status=http_client.NOT_FOUND)
+ r = self.post('/credentials', body={'credential': ref})
+ self.assertValidCredentialResponse(r, ref)
- def test_project_scoped_token_is_invalid_after_disabling_user(self):
- project_scoped_token = self._get_project_scoped_token()
- # Make sure the token is valid
- self._validate_token(project_scoped_token)
- # Disable the user
- self._set_user_enabled(self.user, enabled=False)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- project_scoped_token)
+ self.addCleanup(self.cleanup)
- def test_domain_scoped_token_is_invalid_after_disabling_user(self):
- # Grant user access to domain
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- domain_scoped_token = self._get_domain_scoped_token()
- # Make sure the token is valid
- self._validate_token(domain_scoped_token)
- # Disable user
- self._set_user_enabled(self.user, enabled=False)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- domain_scoped_token)
+ def auth_plugin_config_override(self):
+ methods = ['totp', 'token', 'password']
+ super(TestAuthTOTP, self).auth_plugin_config_override(methods)
- def test_domain_scoped_token_is_invalid_after_deleting_grant(self):
- # Grant user access to domain
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- domain_scoped_token = self._get_domain_scoped_token()
- # Make sure the token is valid
- self._validate_token(domain_scoped_token)
- # Delete access to domain
- self.assignment_api.delete_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- domain_scoped_token)
+ def _make_credentials(self, cred_type, count=1, user_id=None,
+ project_id=None, blob=None):
+ user_id = user_id or self.default_domain_user['id']
+ project_id = project_id or self.default_domain_project['id']
- def test_project_scoped_token_invalid_after_changing_user_password(self):
- project_scoped_token = self._get_project_scoped_token()
- # Make sure the token is valid
- self._validate_token(project_scoped_token)
- # Update user's password
- self.user['password'] = 'Password1'
- self.identity_api.update_user(self.user['id'], self.user)
- # Ensure updating user's password revokes existing tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- project_scoped_token)
+ creds = []
+ for __ in range(count):
+ if cred_type == 'totp':
+ ref = unit.new_totp_credential(
+ user_id=user_id, project_id=project_id, blob=blob)
+ else:
+ ref = unit.new_credential_ref(
+ user_id=user_id, project_id=project_id)
+ resp = self.post('/credentials', body={'credential': ref})
+ creds.append(resp.json['credential'])
+ return creds
+
+ def _make_auth_data_by_id(self, passcode, user_id=None):
+ return self.build_authentication_request(
+ user_id=user_id or self.default_domain_user['id'],
+ passcode=passcode,
+ project_id=self.project['id'])
- def test_project_scoped_token_invalid_after_disabling_project(self):
- project_scoped_token = self._get_project_scoped_token()
- # Make sure the token is valid
- self._validate_token(project_scoped_token)
- # Disable project
- self.project['enabled'] = False
- self.resource_api.update_project(self.project['id'], self.project)
- # Ensure validating a token for a disabled project fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- project_scoped_token)
+ def _make_auth_data_by_name(self, passcode, username, user_domain_id):
+ return self.build_authentication_request(
+ username=username,
+ user_domain_id=user_domain_id,
+ passcode=passcode,
+ project_id=self.project['id'])
- def test_domain_scoped_token_invalid_after_disabling_domain(self):
- # Grant user access to domain
+ def cleanup(self):
+ totp_creds = self.credential_api.list_credentials_for_user(
+ self.default_domain_user['id'], type='totp')
+
+ other_creds = self.credential_api.list_credentials_for_user(
+ self.default_domain_user['id'], type='other')
+
+ for cred in itertools.chain(other_creds, totp_creds):
+ self.delete('/credentials/%s' % cred['id'],
+ expected_status=http_client.NO_CONTENT)
+
+ def test_with_a_valid_passcode(self):
+ creds = self._make_credentials('totp')
+ secret = creds[-1]['blob']
+ auth_data = self._make_auth_data_by_id(
+ totp._generate_totp_passcode(secret))
+
+ self.v3_create_token(auth_data, expected_status=http_client.CREATED)
+
+ def test_with_an_invalid_passcode_and_user_credentials(self):
+ self._make_credentials('totp')
+ auth_data = self._make_auth_data_by_id('000000')
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+
+ def test_with_an_invalid_passcode_with_no_user_credentials(self):
+ auth_data = self._make_auth_data_by_id('000000')
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+
+ def test_with_a_corrupt_totp_credential(self):
+ self._make_credentials('totp', count=1, blob='0')
+ auth_data = self._make_auth_data_by_id('000000')
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+
+ def test_with_multiple_credentials(self):
+ self._make_credentials('other', 3)
+ creds = self._make_credentials('totp', count=3)
+ secret = creds[-1]['blob']
+
+ auth_data = self._make_auth_data_by_id(
+ totp._generate_totp_passcode(secret))
+ self.v3_create_token(auth_data, expected_status=http_client.CREATED)
+
+ def test_with_multiple_users(self):
+ # make some credentials for the existing user
+ self._make_credentials('totp', count=3)
+
+ # create a new user and their credentials
+ user = unit.create_user(self.identity_api, domain_id=self.domain_id)
self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
- domain_scoped_token = self._get_domain_scoped_token()
- # Make sure the token is valid
- self._validate_token(domain_scoped_token)
- # Disable domain
- self.domain['enabled'] = False
- self.resource_api.update_domain(self.domain['id'], self.domain)
- # Ensure validating a token for a disabled domain fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- domain_scoped_token)
-
- def test_rescope_unscoped_token_with_trust(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- self.assertLess(len(trust_scoped_token), 255)
-
- def test_validate_a_trust_scoped_token(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
-
- def test_validate_tampered_trust_scoped_token_fails(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Get a trust scoped token
- tampered_token = (trust_scoped_token[:50] + uuid.uuid4().hex +
- trust_scoped_token[50 + 32:])
- self._validate_token(tampered_token,
- expected_status=http_client.NOT_FOUND)
+ user_id=user['id'],
+ project_id=self.project['id'])
+ creds = self._make_credentials('totp', count=1, user_id=user['id'])
+ secret = creds[-1]['blob']
- def test_revoke_trust_scoped_token(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
- self._revoke_token(trust_scoped_token)
- self._validate_token(trust_scoped_token,
- expected_status=http_client.NOT_FOUND)
-
- def test_trust_scoped_token_is_invalid_after_disabling_trustee(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
-
- # Disable trustee
- trustee_update_ref = dict(enabled=False)
- self.identity_api.update_user(trustee_user['id'], trustee_update_ref)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
-
- def test_trust_scoped_token_invalid_after_changing_trustee_password(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
- # Change trustee's password
- trustee_update_ref = dict(password='Password1')
- self.identity_api.update_user(trustee_user['id'], trustee_update_ref)
- # Ensure updating trustee's password revokes existing tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
-
- def test_trust_scoped_token_is_invalid_after_disabling_trustor(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
-
- # Disable the trustor
- trustor_update_ref = dict(enabled=False)
- self.identity_api.update_user(self.user['id'], trustor_update_ref)
- # Ensure validating a token for a disabled user fails
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
+        # Stop the clock; otherwise there is a chance of auth failure due to
+ # getting a different TOTP between the call here and the call in the
+ # auth plugin.
+ self.useFixture(fixture.TimeFixture())
- def test_trust_scoped_token_invalid_after_changing_trustor_password(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
+ auth_data = self._make_auth_data_by_id(
+ totp._generate_totp_passcode(secret), user_id=user['id'])
+ self.v3_create_token(auth_data, expected_status=http_client.CREATED)
- # Change trustor's password
- trustor_update_ref = dict(password='Password1')
- self.identity_api.update_user(self.user['id'], trustor_update_ref)
- # Ensure updating trustor's password revokes existing user's tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
+ def test_with_multiple_users_and_invalid_credentials(self):
+ """Prevent logging in with someone else's credentials.
- def test_trust_scoped_token_invalid_after_disabled_trustor_domain(self):
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- # Validate a trust scoped token
- self._validate_token(trust_scoped_token)
+ It's very easy to forget to limit the credentials query by user.
+ Let's just test it for a sanity check.
+ """
+ # make some credentials for the existing user
+ self._make_credentials('totp', count=3)
- # Disable trustor's domain
- self.domain['enabled'] = False
- self.resource_api.update_domain(self.domain['id'], self.domain)
+ # create a new user and their credentials
+ new_user = unit.create_user(self.identity_api,
+ domain_id=self.domain_id)
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=new_user['id'],
+ project_id=self.project['id'])
+ user2_creds = self._make_credentials(
+ 'totp', count=1, user_id=new_user['id'])
+
+ user_id = self.default_domain_user['id'] # user1
+ secret = user2_creds[-1]['blob']
+
+ auth_data = self._make_auth_data_by_id(
+ totp._generate_totp_passcode(secret), user_id=user_id)
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+
+ def test_with_username_and_domain_id(self):
+ creds = self._make_credentials('totp')
+ secret = creds[-1]['blob']
+ auth_data = self._make_auth_data_by_name(
+ totp._generate_totp_passcode(secret),
+ username=self.default_domain_user['name'],
+ user_domain_id=self.default_domain_user['domain_id'])
- trustor_update_ref = dict(password='Password1')
- self.identity_api.update_user(self.user['id'], trustor_update_ref)
- # Ensure updating trustor's password revokes existing user's tokens
- self.assertRaises(exception.TokenNotFound,
- self.token_provider_api.validate_token,
- trust_scoped_token)
+ self.v3_create_token(auth_data, expected_status=http_client.CREATED)
- def test_v2_validate_unscoped_token_returns_unauthorized(self):
- """Test raised exception when validating unscoped token.
- Test that validating an unscoped token in v2.0 of a v3 user of a
- non-default domain returns unauthorized.
- """
- unscoped_token = self._get_unscoped_token()
- self.assertRaises(exception.Unauthorized,
- self.token_provider_api.validate_v2_token,
- unscoped_token)
+class TestFetchRevocationList(test_v3.RestfulTestCase):
+ """Test fetch token revocation list on the v3 Identity API."""
- def test_v2_validate_domain_scoped_token_returns_unauthorized(self):
- """Test raised exception when validating a domain scoped token.
+ def config_overrides(self):
+ super(TestFetchRevocationList, self).config_overrides()
+ self.config_fixture.config(group='token', revoke_by_id=True)
+
+ def test_ids_no_tokens(self):
+ # When there's no revoked tokens the response is an empty list, and
+ # the response is signed.
+ res = self.get('/auth/tokens/OS-PKI/revoked')
+ signed = res.json['signed']
+ clear = cms.cms_verify(signed, CONF.signing.certfile,
+ CONF.signing.ca_certs)
+ payload = json.loads(clear)
+ self.assertEqual({'revoked': []}, payload)
+
+ def test_ids_token(self):
+ # When there's a revoked token, it's in the response, and the response
+ # is signed.
+ token_res = self.v3_create_token(
+ self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id']))
- Test that validating an domain scoped token in v2.0
- returns unauthorized.
- """
+ token_id = token_res.headers.get('X-Subject-Token')
+ token_data = token_res.json['token']
- # Grant user access to domain
- self.assignment_api.create_grant(self.role['id'],
- user_id=self.user['id'],
- domain_id=self.domain['id'])
+ self.delete('/auth/tokens', headers={'X-Subject-Token': token_id})
- scoped_token = self._get_domain_scoped_token()
- self.assertRaises(exception.Unauthorized,
- self.token_provider_api.validate_v2_token,
- scoped_token)
+ res = self.get('/auth/tokens/OS-PKI/revoked')
+ signed = res.json['signed']
+ clear = cms.cms_verify(signed, CONF.signing.certfile,
+ CONF.signing.ca_certs)
+ payload = json.loads(clear)
- def test_v2_validate_trust_scoped_token(self):
- """Test raised exception when validating a trust scoped token.
+ def truncate(ts_str):
+ return ts_str[:19] + 'Z' # 2016-01-21T15:53:52 == 19 chars.
- Test that validating an trust scoped token in v2.0 returns
- unauthorized.
- """
+ exp_token_revoke_data = {
+ 'id': token_id,
+ 'audit_id': token_data['audit_ids'][0],
+ 'expires': truncate(token_data['expires_at']),
+ }
- trustee_user, trust = self._create_trust()
- trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust)
- self.assertRaises(exception.Unauthorized,
- self.token_provider_api.validate_v2_token,
- trust_scoped_token)
+ self.assertEqual({'revoked': [exp_token_revoke_data]}, payload)
+ def test_audit_id_only_no_tokens(self):
+ # When there's no revoked tokens and ?audit_id_only is used, the
+ # response is an empty list and is not signed.
+ res = self.get('/auth/tokens/OS-PKI/revoked?audit_id_only')
+ self.assertEqual({'revoked': []}, res.json)
-class TestAuthFernetTokenProvider(TestAuth):
- def setUp(self):
- super(TestAuthFernetTokenProvider, self).setUp()
- self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+ def test_audit_id_only_token(self):
+ # When there's a revoked token and ?audit_id_only is used, the
+ # response contains the audit_id of the token and is not signed.
+ token_res = self.v3_create_token(
+ self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ project_id=self.project['id']))
- def config_overrides(self):
- super(TestAuthFernetTokenProvider, self).config_overrides()
- self.config_fixture.config(group='token', provider='fernet')
+ token_id = token_res.headers.get('X-Subject-Token')
+ token_data = token_res.json['token']
- def test_verify_with_bound_token(self):
- self.config_fixture.config(group='token', bind='kerberos')
- auth_data = self.build_authentication_request(
- project_id=self.project['id'])
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- # Bind not current supported by Fernet, see bug 1433311.
- self.v3_authenticate_token(auth_data, expected_status=501)
+ self.delete('/auth/tokens', headers={'X-Subject-Token': token_id})
- def test_v2_v3_bind_token_intermix(self):
- self.config_fixture.config(group='token', bind='kerberos')
+ res = self.get('/auth/tokens/OS-PKI/revoked?audit_id_only')
- # we need our own user registered to the default domain because of
- # the way external auth works.
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- body = {'auth': {}}
- # Bind not current supported by Fernet, see bug 1433311.
- self.admin_request(path='/v2.0/tokens',
- method='POST',
- body=body,
- expected_status=501)
+ def truncate(ts_str):
+ return ts_str[:19] + 'Z' # 2016-01-21T15:53:52 == 19 chars.
- def test_auth_with_bind_token(self):
- self.config_fixture.config(group='token', bind=['kerberos'])
+ exp_token_revoke_data = {
+ 'audit_id': token_data['audit_ids'][0],
+ 'expires': truncate(token_data['expires_at']),
+ }
- auth_data = self.build_authentication_request()
- remote_user = self.default_domain_user['name']
- self.admin_app.extra_environ.update({'REMOTE_USER': remote_user,
- 'AUTH_TYPE': 'Negotiate'})
- # Bind not current supported by Fernet, see bug 1433311.
- self.v3_authenticate_token(auth_data, expected_status=501)
+ self.assertEqual({'revoked': [exp_token_revoke_data]}, res.json)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_catalog.py b/keystone-moon/keystone/tests/unit/test_v3_catalog.py
index c536169a..2eb9db14 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_catalog.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_catalog.py
@@ -31,12 +31,12 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_region_with_id(self):
"""Call ``PUT /regions/{region_id}`` w/o an ID in the request body."""
- ref = self.new_region_ref()
+ ref = unit.new_region_ref()
region_id = ref.pop('id')
r = self.put(
'/regions/%s' % region_id,
body={'region': ref},
- expected_status=201)
+ expected_status=http_client.CREATED)
self.assertValidRegionResponse(r, ref)
# Double-check that the region ID was kept as-is and not
# populated with a UUID, as is the case with POST /v3/regions
@@ -44,12 +44,12 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_region_with_matching_ids(self):
"""Call ``PUT /regions/{region_id}`` with an ID in the request body."""
- ref = self.new_region_ref()
+ ref = unit.new_region_ref()
region_id = ref['id']
r = self.put(
'/regions/%s' % region_id,
body={'region': ref},
- expected_status=201)
+ expected_status=http_client.CREATED)
self.assertValidRegionResponse(r, ref)
# Double-check that the region ID was kept as-is and not
# populated with a UUID, as is the case with POST /v3/regions
@@ -60,16 +60,16 @@ class CatalogTestCase(test_v3.RestfulTestCase):
ref = dict(description="my region")
self.put(
'/regions/myregion',
- body={'region': ref}, expected_status=201)
+ body={'region': ref}, expected_status=http_client.CREATED)
# Create region again with duplicate id
self.put(
'/regions/myregion',
- body={'region': ref}, expected_status=409)
+ body={'region': ref}, expected_status=http_client.CONFLICT)
def test_create_region(self):
"""Call ``POST /regions`` with an ID in the request body."""
# the ref will have an ID defined on it
- ref = self.new_region_ref()
+ ref = unit.new_region_ref()
r = self.post(
'/regions',
body={'region': ref})
@@ -83,39 +83,30 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_region_with_empty_id(self):
"""Call ``POST /regions`` with an empty ID in the request body."""
- ref = self.new_region_ref()
- ref['id'] = ''
+ ref = unit.new_region_ref(id='')
- r = self.post(
- '/regions',
- body={'region': ref}, expected_status=201)
+ r = self.post('/regions', body={'region': ref})
self.assertValidRegionResponse(r, ref)
self.assertNotEmpty(r.result['region'].get('id'))
def test_create_region_without_id(self):
"""Call ``POST /regions`` without an ID in the request body."""
- ref = self.new_region_ref()
+ ref = unit.new_region_ref()
# instead of defining the ID ourselves...
del ref['id']
# let the service define the ID
- r = self.post(
- '/regions',
- body={'region': ref},
- expected_status=201)
+ r = self.post('/regions', body={'region': ref})
self.assertValidRegionResponse(r, ref)
def test_create_region_without_description(self):
"""Call ``POST /regions`` without description in the request body."""
- ref = self.new_region_ref()
+ ref = unit.new_region_ref(description=None)
del ref['description']
- r = self.post(
- '/regions',
- body={'region': ref},
- expected_status=201)
+ r = self.post('/regions', body={'region': ref})
# Create the description in the reference to compare to since the
# response should now have a description, even though we didn't send
# it with the original reference.
@@ -123,51 +114,34 @@ class CatalogTestCase(test_v3.RestfulTestCase):
self.assertValidRegionResponse(r, ref)
def test_create_regions_with_same_description_string(self):
- """Call ``POST /regions`` with same description in the request bodies.
- """
+ """Call ``POST /regions`` with duplicate descriptions."""
# NOTE(lbragstad): Make sure we can create two regions that have the
# same description.
- ref1 = self.new_region_ref()
- ref2 = self.new_region_ref()
-
region_desc = 'Some Region Description'
- ref1['description'] = region_desc
- ref2['description'] = region_desc
+ ref1 = unit.new_region_ref(description=region_desc)
+ ref2 = unit.new_region_ref(description=region_desc)
- resp1 = self.post(
- '/regions',
- body={'region': ref1},
- expected_status=201)
+ resp1 = self.post('/regions', body={'region': ref1})
self.assertValidRegionResponse(resp1, ref1)
- resp2 = self.post(
- '/regions',
- body={'region': ref2},
- expected_status=201)
+ resp2 = self.post('/regions', body={'region': ref2})
self.assertValidRegionResponse(resp2, ref2)
def test_create_regions_without_descriptions(self):
- """Call ``POST /regions`` with no description in the request bodies.
- """
+ """Call ``POST /regions`` with no description."""
# NOTE(lbragstad): Make sure we can create two regions that have
# no description in the request body. The description should be
# populated by Catalog Manager.
- ref1 = self.new_region_ref()
- ref2 = self.new_region_ref()
+ ref1 = unit.new_region_ref()
+ ref2 = unit.new_region_ref()
del ref1['description']
ref2['description'] = None
- resp1 = self.post(
- '/regions',
- body={'region': ref1},
- expected_status=201)
+ resp1 = self.post('/regions', body={'region': ref1})
- resp2 = self.post(
- '/regions',
- body={'region': ref2},
- expected_status=201)
+ resp2 = self.post('/regions', body={'region': ref2})
# Create the descriptions in the references to compare to since the
# responses should now have descriptions, even though we didn't send
# a description with the original references.
@@ -179,7 +153,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_region_with_conflicting_ids(self):
"""Call ``PUT /regions/{region_id}`` with conflicting region IDs."""
# the region ref is created with an ID
- ref = self.new_region_ref()
+ ref = unit.new_region_ref()
# but instead of using that ID, make up a new, conflicting one
self.put(
@@ -193,8 +167,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
self.assertValidRegionListResponse(r, ref=self.region)
def _create_region_with_parent_id(self, parent_id=None):
- ref = self.new_region_ref()
- ref['parent_region_id'] = parent_id
+ ref = unit.new_region_ref(parent_region_id=parent_id)
return self.post(
'/regions',
body={'region': ref})
@@ -220,7 +193,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_update_region(self):
"""Call ``PATCH /regions/{region_id}``."""
- region = self.new_region_ref()
+ region = unit.new_region_ref()
del region['id']
r = self.patch('/regions/%(region_id)s' % {
'region_id': self.region_id},
@@ -229,18 +202,16 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_update_region_without_description_keeps_original(self):
"""Call ``PATCH /regions/{region_id}``."""
- region_ref = self.new_region_ref()
+ region_ref = unit.new_region_ref()
- resp = self.post('/regions', body={'region': region_ref},
- expected_status=201)
+ resp = self.post('/regions', body={'region': region_ref})
region_updates = {
# update with something that's not the description
'parent_region_id': self.region_id,
}
resp = self.patch('/regions/%s' % region_ref['id'],
- body={'region': region_updates},
- expected_status=200)
+ body={'region': region_updates})
# NOTE(dstanek): Keystone should keep the original description.
self.assertEqual(region_ref['description'],
@@ -248,9 +219,8 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_update_region_with_null_description(self):
"""Call ``PATCH /regions/{region_id}``."""
- region = self.new_region_ref()
+ region = unit.new_region_ref(description=None)
del region['id']
- region['description'] = None
r = self.patch('/regions/%(region_id)s' % {
'region_id': self.region_id},
body={'region': region})
@@ -262,8 +232,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_delete_region(self):
"""Call ``DELETE /regions/{region_id}``."""
-
- ref = self.new_region_ref()
+ ref = unit.new_region_ref()
r = self.post(
'/regions',
body={'region': ref})
@@ -276,7 +245,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_service(self):
"""Call ``POST /services``."""
- ref = self.new_service_ref()
+ ref = unit.new_service_ref()
r = self.post(
'/services',
body={'service': ref})
@@ -284,7 +253,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_service_no_name(self):
"""Call ``POST /services``."""
- ref = self.new_service_ref()
+ ref = unit.new_service_ref()
del ref['name']
r = self.post(
'/services',
@@ -294,7 +263,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_service_no_enabled(self):
"""Call ``POST /services``."""
- ref = self.new_service_ref()
+ ref = unit.new_service_ref()
del ref['enabled']
r = self.post(
'/services',
@@ -305,8 +274,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_service_enabled_false(self):
"""Call ``POST /services``."""
- ref = self.new_service_ref()
- ref['enabled'] = False
+ ref = unit.new_service_ref(enabled=False)
r = self.post(
'/services',
body={'service': ref})
@@ -315,8 +283,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_service_enabled_true(self):
"""Call ``POST /services``."""
- ref = self.new_service_ref()
- ref['enabled'] = True
+ ref = unit.new_service_ref(enabled=True)
r = self.post(
'/services',
body={'service': ref})
@@ -325,22 +292,19 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_service_enabled_str_true(self):
"""Call ``POST /services``."""
- ref = self.new_service_ref()
- ref['enabled'] = 'True'
+ ref = unit.new_service_ref(enabled='True')
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_service_enabled_str_false(self):
"""Call ``POST /services``."""
- ref = self.new_service_ref()
- ref['enabled'] = 'False'
+ ref = unit.new_service_ref(enabled='False')
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_service_enabled_str_random(self):
"""Call ``POST /services``."""
- ref = self.new_service_ref()
- ref['enabled'] = 'puppies'
+ ref = unit.new_service_ref(enabled='puppies')
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
@@ -350,8 +314,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
self.assertValidServiceListResponse(r, ref=self.service)
def _create_random_service(self):
- ref = self.new_service_ref()
- ref['enabled'] = True
+ ref = unit.new_service_ref()
response = self.post(
'/services',
body={'service': ref})
@@ -399,7 +362,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_update_service(self):
"""Call ``PATCH /services/{service_id}``."""
- service = self.new_service_ref()
+ service = unit.new_service_ref()
del service['id']
r = self.patch('/services/%(service_id)s' % {
'service_id': self.service_id},
@@ -423,7 +386,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
region = self._create_region_with_parent_id(
parent_id=parent_region_id)
service = self._create_random_service()
- ref = self.new_endpoint_ref(
+ ref = unit.new_endpoint_ref(
service_id=service['id'],
interface=interface,
region_id=region.result['region']['id'])
@@ -547,87 +510,84 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_create_endpoint_no_enabled(self):
"""Call ``POST /endpoints``."""
- ref = self.new_endpoint_ref(service_id=self.service_id)
- r = self.post(
- '/endpoints',
- body={'endpoint': ref})
+ ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id)
+ r = self.post('/endpoints', body={'endpoint': ref})
ref['enabled'] = True
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_true(self):
"""Call ``POST /endpoints`` with enabled: true."""
- ref = self.new_endpoint_ref(service_id=self.service_id,
+ ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id,
enabled=True)
- r = self.post(
- '/endpoints',
- body={'endpoint': ref})
+ r = self.post('/endpoints', body={'endpoint': ref})
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_false(self):
"""Call ``POST /endpoints`` with enabled: false."""
- ref = self.new_endpoint_ref(service_id=self.service_id,
+ ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id,
enabled=False)
- r = self.post(
- '/endpoints',
- body={'endpoint': ref})
+ r = self.post('/endpoints', body={'endpoint': ref})
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_str_true(self):
"""Call ``POST /endpoints`` with enabled: 'True'."""
- ref = self.new_endpoint_ref(service_id=self.service_id,
+ ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id,
enabled='True')
- self.post(
- '/endpoints',
- body={'endpoint': ref},
- expected_status=http_client.BAD_REQUEST)
+ self.post('/endpoints', body={'endpoint': ref},
+ expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_enabled_str_false(self):
"""Call ``POST /endpoints`` with enabled: 'False'."""
- ref = self.new_endpoint_ref(service_id=self.service_id,
+ ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id,
enabled='False')
- self.post(
- '/endpoints',
- body={'endpoint': ref},
- expected_status=http_client.BAD_REQUEST)
+ self.post('/endpoints', body={'endpoint': ref},
+ expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_enabled_str_random(self):
"""Call ``POST /endpoints`` with enabled: 'puppies'."""
- ref = self.new_endpoint_ref(service_id=self.service_id,
+ ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id,
enabled='puppies')
- self.post(
- '/endpoints',
- body={'endpoint': ref},
- expected_status=http_client.BAD_REQUEST)
+ self.post('/endpoints', body={'endpoint': ref},
+ expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_with_invalid_region_id(self):
"""Call ``POST /endpoints``."""
- ref = self.new_endpoint_ref(service_id=self.service_id)
- ref["region_id"] = uuid.uuid4().hex
+ ref = unit.new_endpoint_ref(service_id=self.service_id)
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_with_region(self):
- """EndpointV3 creates the region before creating the endpoint, if
- endpoint is provided with 'region' and no 'region_id'
+ """EndpointV3 creates the region before creating the endpoint.
+
+ This occurs when endpoint is provided with 'region' and no 'region_id'.
"""
- ref = self.new_endpoint_ref(service_id=self.service_id)
- ref["region"] = uuid.uuid4().hex
- ref.pop('region_id')
- self.post('/endpoints', body={'endpoint': ref}, expected_status=201)
+ ref = unit.new_endpoint_ref_with_region(service_id=self.service_id,
+ region=uuid.uuid4().hex)
+ self.post('/endpoints', body={'endpoint': ref})
# Make sure the region is created
- self.get('/regions/%(region_id)s' % {
- 'region_id': ref["region"]})
+ self.get('/regions/%(region_id)s' % {'region_id': ref["region"]})
def test_create_endpoint_with_no_region(self):
"""EndpointV3 allows to creates the endpoint without region."""
- ref = self.new_endpoint_ref(service_id=self.service_id)
- ref.pop('region_id')
- self.post('/endpoints', body={'endpoint': ref}, expected_status=201)
+ ref = unit.new_endpoint_ref(service_id=self.service_id, region_id=None)
+ del ref['region_id'] # cannot just be None, it needs to not exist
+ self.post('/endpoints', body={'endpoint': ref})
def test_create_endpoint_with_empty_url(self):
"""Call ``POST /endpoints``."""
- ref = self.new_endpoint_ref(service_id=self.service_id)
- ref["url"] = ''
+ ref = unit.new_endpoint_ref(service_id=self.service_id, url='')
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
@@ -640,7 +600,9 @@ class CatalogTestCase(test_v3.RestfulTestCase):
def test_update_endpoint(self):
"""Call ``PATCH /endpoints/{endpoint_id}``."""
- ref = self.new_endpoint_ref(service_id=self.service_id)
+ ref = unit.new_endpoint_ref(service_id=self.service_id,
+ interface='public',
+ region_id=self.region_id)
del ref['id']
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
@@ -704,13 +666,12 @@ class CatalogTestCase(test_v3.RestfulTestCase):
'endpoint_id': self.endpoint_id})
# create a v3 endpoint ref, and then tweak it back to a v2-style ref
- ref = self.new_endpoint_ref(service_id=self.service['id'])
+ ref = unit.new_endpoint_ref_with_region(service_id=self.service['id'],
+ region=uuid.uuid4().hex,
+ internalurl=None)
del ref['id']
del ref['interface']
ref['publicurl'] = ref.pop('url')
- ref['internalurl'] = None
- ref['region'] = ref['region_id']
- del ref['region_id']
# don't set adminurl to ensure it's absence is handled like internalurl
# create the endpoint on v2 (using a v3 token)
@@ -751,15 +712,16 @@ class CatalogTestCase(test_v3.RestfulTestCase):
self.assertEqual(endpoint_v2['region'], endpoint_v3['region_id'])
def test_deleting_endpoint_with_space_in_url(self):
- # create a v3 endpoint ref
- ref = self.new_endpoint_ref(service_id=self.service['id'])
-
# add a space to all urls (intentional "i d" to test bug)
url_with_space = "http://127.0.0.1:8774 /v1.1/\$(tenant_i d)s"
- ref['publicurl'] = url_with_space
- ref['internalurl'] = url_with_space
- ref['adminurl'] = url_with_space
- ref['url'] = url_with_space
+
+ # create a v3 endpoint ref
+ ref = unit.new_endpoint_ref(service_id=self.service['id'],
+ region_id=None,
+ publicurl=url_with_space,
+ internalurl=url_with_space,
+ adminurl=url_with_space,
+ url=url_with_space)
# add the endpoint to the database
self.catalog_api.create_endpoint(ref['id'], ref)
@@ -767,7 +729,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
# delete the endpoint
self.delete('/endpoints/%s' % ref['id'])
- # make sure it's deleted (GET should return 404)
+ # make sure it's deleted (GET should return Not Found)
self.get('/endpoints/%s' % ref['id'],
expected_status=http_client.NOT_FOUND)
@@ -776,15 +738,24 @@ class CatalogTestCase(test_v3.RestfulTestCase):
# list one valid url is enough, no need to list too much
valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
- ref = self.new_endpoint_ref(self.service_id)
- ref['url'] = valid_url
- self.post('/endpoints',
- body={'endpoint': ref},
- expected_status=201)
+ ref = unit.new_endpoint_ref(self.service_id,
+ interface='public',
+ region_id=self.region_id,
+ url=valid_url)
+ self.post('/endpoints', body={'endpoint': ref})
+
+ def test_endpoint_create_with_valid_url_project_id(self):
+        """Creating an endpoint with a valid URL should also succeed."""
+ valid_url = 'http://127.0.0.1:8774/v1.1/$(project_id)s'
+
+ ref = unit.new_endpoint_ref(self.service_id,
+ interface='public',
+ region_id=self.region_id,
+ url=valid_url)
+ self.post('/endpoints', body={'endpoint': ref})
def test_endpoint_create_with_invalid_url(self):
- """Test the invalid cases: substitutions is not exactly right.
- """
+        """Test the invalid cases: the substitutions are not exactly right."""
invalid_urls = [
# using a substitution that is not whitelisted - KeyError
'http://127.0.0.1:8774/v1.1/$(nonexistent)s',
@@ -799,7 +770,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
'http://127.0.0.1:8774/v1.1/$(admin_url)d',
]
- ref = self.new_endpoint_ref(self.service_id)
+ ref = unit.new_endpoint_ref(self.service_id)
for invalid_url in invalid_urls:
ref['url'] = invalid_url
@@ -809,37 +780,30 @@ class CatalogTestCase(test_v3.RestfulTestCase):
class TestCatalogAPISQL(unit.TestCase):
- """Tests for the catalog Manager against the SQL backend.
-
- """
+ """Tests for the catalog Manager against the SQL backend."""
def setUp(self):
super(TestCatalogAPISQL, self).setUp()
self.useFixture(database.Database())
self.catalog_api = catalog.Manager()
- self.service_id = uuid.uuid4().hex
- service = {'id': self.service_id, 'name': uuid.uuid4().hex}
+ service = unit.new_service_ref()
+ self.service_id = service['id']
self.catalog_api.create_service(self.service_id, service)
- endpoint = self.new_endpoint_ref(service_id=self.service_id)
+ self.create_endpoint(service_id=self.service_id)
+
+ def create_endpoint(self, service_id, **kwargs):
+ endpoint = unit.new_endpoint_ref(service_id=service_id,
+ region_id=None, **kwargs)
+
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ return endpoint
def config_overrides(self):
super(TestCatalogAPISQL, self).config_overrides()
self.config_fixture.config(group='catalog', driver='sql')
- def new_endpoint_ref(self, service_id):
- return {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'interface': uuid.uuid4().hex[:8],
- 'service_id': service_id,
- 'url': uuid.uuid4().hex,
- 'region': uuid.uuid4().hex,
- }
-
def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
@@ -851,14 +815,12 @@ class TestCatalogAPISQL(unit.TestCase):
self.assertEqual(1, len(self.catalog_api.list_endpoints()))
# create a new, invalid endpoint - malformed type declaration
- ref = self.new_endpoint_ref(self.service_id)
- ref['url'] = 'http://keystone/%(tenant_id)'
- self.catalog_api.create_endpoint(ref['id'], ref)
+ self.create_endpoint(self.service_id,
+ url='http://keystone/%(tenant_id)')
# create a new, invalid endpoint - nonexistent key
- ref = self.new_endpoint_ref(self.service_id)
- ref['url'] = 'http://keystone/%(you_wont_find_me)s'
- self.catalog_api.create_endpoint(ref['id'], ref)
+ self.create_endpoint(self.service_id,
+ url='http://keystone/%(you_wont_find_me)s')
# verify that the invalid endpoints don't appear in the catalog
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
@@ -867,9 +829,8 @@ class TestCatalogAPISQL(unit.TestCase):
self.assertEqual(3, len(self.catalog_api.list_endpoints()))
# create another valid endpoint - tenant_id will be replaced
- ref = self.new_endpoint_ref(self.service_id)
- ref['url'] = 'http://keystone/%(tenant_id)s'
- self.catalog_api.create_endpoint(ref['id'], ref)
+ self.create_endpoint(self.service_id,
+ url='http://keystone/%(tenant_id)s')
# there are two valid endpoints, positive check
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
@@ -877,7 +838,8 @@ class TestCatalogAPISQL(unit.TestCase):
# If the URL has no 'tenant_id' to substitute, we will skip the
# endpoint which contains this kind of URL, negative check.
- catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id=None)
+ tenant_id = None
+ catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
def test_get_catalog_always_returns_service_name(self):
@@ -885,23 +847,15 @@ class TestCatalogAPISQL(unit.TestCase):
tenant_id = uuid.uuid4().hex
# create a service, with a name
- named_svc = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- }
+ named_svc = unit.new_service_ref()
self.catalog_api.create_service(named_svc['id'], named_svc)
- endpoint = self.new_endpoint_ref(service_id=named_svc['id'])
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ self.create_endpoint(service_id=named_svc['id'])
# create a service, with no name
- unnamed_svc = {
- 'id': uuid.uuid4().hex,
- 'type': uuid.uuid4().hex
- }
+ unnamed_svc = unit.new_service_ref(name=None)
+ del unnamed_svc['name']
self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc)
- endpoint = self.new_endpoint_ref(service_id=unnamed_svc['id'])
- self.catalog_api.create_endpoint(endpoint['id'], endpoint)
+ self.create_endpoint(service_id=unnamed_svc['id'])
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
@@ -917,9 +871,7 @@ class TestCatalogAPISQL(unit.TestCase):
# TODO(dstanek): this needs refactoring with the test above, but we are in a
# crunch so that will happen in a future patch.
class TestCatalogAPISQLRegions(unit.TestCase):
- """Tests for the catalog Manager against the SQL backend.
-
- """
+ """Tests for the catalog Manager against the SQL backend."""
def setUp(self):
super(TestCatalogAPISQLRegions, self).setUp()
@@ -930,23 +882,13 @@ class TestCatalogAPISQLRegions(unit.TestCase):
super(TestCatalogAPISQLRegions, self).config_overrides()
self.config_fixture.config(group='catalog', driver='sql')
- def new_endpoint_ref(self, service_id):
- return {
- 'id': uuid.uuid4().hex,
- 'name': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'interface': uuid.uuid4().hex[:8],
- 'service_id': service_id,
- 'url': uuid.uuid4().hex,
- 'region_id': uuid.uuid4().hex,
- }
-
def test_get_catalog_returns_proper_endpoints_with_no_region(self):
- service_id = uuid.uuid4().hex
- service = {'id': service_id, 'name': uuid.uuid4().hex}
+ service = unit.new_service_ref()
+ service_id = service['id']
self.catalog_api.create_service(service_id, service)
- endpoint = self.new_endpoint_ref(service_id=service_id)
+ endpoint = unit.new_endpoint_ref(service_id=service_id,
+ region_id=None)
del endpoint['region_id']
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
@@ -958,12 +900,13 @@ class TestCatalogAPISQLRegions(unit.TestCase):
catalog[0]['endpoints'][0], ref=endpoint)
def test_get_catalog_returns_proper_endpoints_with_region(self):
- service_id = uuid.uuid4().hex
- service = {'id': service_id, 'name': uuid.uuid4().hex}
+ service = unit.new_service_ref()
+ service_id = service['id']
self.catalog_api.create_service(service_id, service)
- endpoint = self.new_endpoint_ref(service_id=service_id)
- self.catalog_api.create_region({'id': endpoint['region_id']})
+ endpoint = unit.new_endpoint_ref(service_id=service_id)
+ region = unit.new_region_ref(id=endpoint['region_id'])
+ self.catalog_api.create_region(region)
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
endpoint = self.catalog_api.get_endpoint(endpoint['id'])
diff --git a/keystone-moon/keystone/tests/unit/test_v3_credential.py b/keystone-moon/keystone/tests/unit/test_v3_credential.py
index dd8cf2dd..07995f19 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_credential.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_credential.py
@@ -21,49 +21,46 @@ from oslo_config import cfg
from six.moves import http_client
from testtools import matchers
+from keystone.common import utils
+from keystone.contrib.ec2 import controllers
from keystone import exception
+from keystone.tests import unit
from keystone.tests.unit import test_v3
CONF = cfg.CONF
+CRED_TYPE_EC2 = controllers.CRED_TYPE_EC2
class CredentialBaseTestCase(test_v3.RestfulTestCase):
def _create_dict_blob_credential(self):
- blob = {"access": uuid.uuid4().hex,
- "secret": uuid.uuid4().hex}
- credential_id = hashlib.sha256(blob['access']).hexdigest()
- credential = self.new_credential_ref(
- user_id=self.user['id'],
- project_id=self.project_id)
- credential['id'] = credential_id
+ blob, credential = unit.new_ec2_credential(user_id=self.user['id'],
+ project_id=self.project_id)
# Store the blob as a dict *not* JSON ref bug #1259584
# This means we can test the dict->json workaround, added
# as part of the bugfix for backwards compatibility works.
credential['blob'] = blob
- credential['type'] = 'ec2'
+ credential_id = credential['id']
+
# Create direct via the DB API to avoid validation failure
- self.credential_api.create_credential(
- credential_id,
- credential)
- expected_blob = json.dumps(blob)
- return expected_blob, credential_id
+ self.credential_api.create_credential(credential_id, credential)
+
+ return json.dumps(blob), credential_id
class CredentialTestCase(CredentialBaseTestCase):
"""Test credential CRUD."""
+
def setUp(self):
super(CredentialTestCase, self).setUp()
- self.credential_id = uuid.uuid4().hex
- self.credential = self.new_credential_ref(
- user_id=self.user['id'],
- project_id=self.project_id)
- self.credential['id'] = self.credential_id
+ self.credential = unit.new_credential_ref(user_id=self.user['id'],
+ project_id=self.project_id)
+
self.credential_api.create_credential(
- self.credential_id,
+ self.credential['id'],
self.credential)
def test_credential_api_delete_credentials_for_project(self):
@@ -72,7 +69,7 @@ class CredentialTestCase(CredentialBaseTestCase):
# once we delete all credentials for self.project_id
self.assertRaises(exception.CredentialNotFound,
self.credential_api.get_credential,
- credential_id=self.credential_id)
+ credential_id=self.credential['id'])
def test_credential_api_delete_credentials_for_user(self):
self.credential_api.delete_credentials_for_user(self.user_id)
@@ -80,7 +77,7 @@ class CredentialTestCase(CredentialBaseTestCase):
# once we delete all credentials for self.user_id
self.assertRaises(exception.CredentialNotFound,
self.credential_api.get_credential,
- credential_id=self.credential_id)
+ credential_id=self.credential['id'])
def test_list_credentials(self):
"""Call ``GET /credentials``."""
@@ -89,10 +86,8 @@ class CredentialTestCase(CredentialBaseTestCase):
def test_list_credentials_filtered_by_user_id(self):
"""Call ``GET /credentials?user_id={user_id}``."""
- credential = self.new_credential_ref(
- user_id=uuid.uuid4().hex)
- self.credential_api.create_credential(
- credential['id'], credential)
+ credential = unit.new_credential_ref(user_id=uuid.uuid4().hex)
+ self.credential_api.create_credential(credential['id'], credential)
r = self.get('/credentials?user_id=%s' % self.user['id'])
self.assertValidCredentialListResponse(r, ref=self.credential)
@@ -103,9 +98,9 @@ class CredentialTestCase(CredentialBaseTestCase):
"""Call ``GET /credentials?type={type}``."""
# The type ec2 was chosen, instead of a random string,
# because the type must be in the list of supported types
- ec2_credential = self.new_credential_ref(user_id=uuid.uuid4().hex,
+ ec2_credential = unit.new_credential_ref(user_id=uuid.uuid4().hex,
project_id=self.project_id,
- cred_type='ec2')
+ type=CRED_TYPE_EC2)
ec2_resp = self.credential_api.create_credential(
ec2_credential['id'], ec2_credential)
@@ -123,8 +118,8 @@ class CredentialTestCase(CredentialBaseTestCase):
cred_ec2 = r_ec2.result['credentials'][0]
self.assertValidCredentialListResponse(r_ec2, ref=ec2_resp)
- self.assertEqual('ec2', cred_ec2['type'])
- self.assertEqual(cred_ec2['id'], ec2_credential['id'])
+ self.assertEqual(CRED_TYPE_EC2, cred_ec2['type'])
+ self.assertEqual(ec2_credential['id'], cred_ec2['id'])
def test_list_credentials_filtered_by_type_and_user_id(self):
"""Call ``GET /credentials?user_id={user_id}&type={type}``."""
@@ -132,12 +127,10 @@ class CredentialTestCase(CredentialBaseTestCase):
user2_id = uuid.uuid4().hex
# Creating credentials for two different users
- credential_user1_ec2 = self.new_credential_ref(
- user_id=user1_id, cred_type='ec2')
- credential_user1_cert = self.new_credential_ref(
- user_id=user1_id)
- credential_user2_cert = self.new_credential_ref(
- user_id=user2_id)
+ credential_user1_ec2 = unit.new_credential_ref(user_id=user1_id,
+ type=CRED_TYPE_EC2)
+ credential_user1_cert = unit.new_credential_ref(user_id=user1_id)
+ credential_user2_cert = unit.new_credential_ref(user_id=user2_id)
self.credential_api.create_credential(
credential_user1_ec2['id'], credential_user1_ec2)
@@ -150,12 +143,12 @@ class CredentialTestCase(CredentialBaseTestCase):
self.assertValidCredentialListResponse(r, ref=credential_user1_ec2)
self.assertThat(r.result['credentials'], matchers.HasLength(1))
cred = r.result['credentials'][0]
- self.assertEqual('ec2', cred['type'])
+ self.assertEqual(CRED_TYPE_EC2, cred['type'])
self.assertEqual(user1_id, cred['user_id'])
def test_create_credential(self):
"""Call ``POST /credentials``."""
- ref = self.new_credential_ref(user_id=self.user['id'])
+ ref = unit.new_credential_ref(user_id=self.user['id'])
r = self.post(
'/credentials',
body={'credential': ref})
@@ -165,18 +158,17 @@ class CredentialTestCase(CredentialBaseTestCase):
"""Call ``GET /credentials/{credential_id}``."""
r = self.get(
'/credentials/%(credential_id)s' % {
- 'credential_id': self.credential_id})
+ 'credential_id': self.credential['id']})
self.assertValidCredentialResponse(r, self.credential)
def test_update_credential(self):
"""Call ``PATCH /credentials/{credential_id}``."""
- ref = self.new_credential_ref(
- user_id=self.user['id'],
- project_id=self.project_id)
+ ref = unit.new_credential_ref(user_id=self.user['id'],
+ project_id=self.project_id)
del ref['id']
r = self.patch(
'/credentials/%(credential_id)s' % {
- 'credential_id': self.credential_id},
+ 'credential_id': self.credential['id']},
body={'credential': ref})
self.assertValidCredentialResponse(r, ref)
@@ -184,29 +176,24 @@ class CredentialTestCase(CredentialBaseTestCase):
"""Call ``DELETE /credentials/{credential_id}``."""
self.delete(
'/credentials/%(credential_id)s' % {
- 'credential_id': self.credential_id})
+ 'credential_id': self.credential['id']})
def test_create_ec2_credential(self):
"""Call ``POST /credentials`` for creating ec2 credential."""
- ref = self.new_credential_ref(user_id=self.user['id'],
- project_id=self.project_id)
- blob = {"access": uuid.uuid4().hex,
- "secret": uuid.uuid4().hex}
- ref['blob'] = json.dumps(blob)
- ref['type'] = 'ec2'
- r = self.post(
- '/credentials',
- body={'credential': ref})
+ blob, ref = unit.new_ec2_credential(user_id=self.user['id'],
+ project_id=self.project_id)
+ r = self.post('/credentials', body={'credential': ref})
self.assertValidCredentialResponse(r, ref)
# Assert credential id is same as hash of access key id for
# ec2 credentials
- self.assertEqual(r.result['credential']['id'],
- hashlib.sha256(blob['access']).hexdigest())
+ access = blob['access'].encode('utf-8')
+ self.assertEqual(hashlib.sha256(access).hexdigest(),
+ r.result['credential']['id'])
# Create second ec2 credential with the same access key id and check
# for conflict.
self.post(
'/credentials',
- body={'credential': ref}, expected_status=409)
+ body={'credential': ref}, expected_status=http_client.CONFLICT)
def test_get_ec2_dict_blob(self):
"""Ensure non-JSON blob data is correctly converted."""
@@ -215,7 +202,11 @@ class CredentialTestCase(CredentialBaseTestCase):
r = self.get(
'/credentials/%(credential_id)s' % {
'credential_id': credential_id})
- self.assertEqual(expected_blob, r.result['credential']['blob'])
+
+ # use json.loads to transform the blobs back into Python dictionaries
+ # to avoid problems with the keys being in different orders.
+ self.assertEqual(json.loads(expected_blob),
+ json.loads(r.result['credential']['blob']))
def test_list_ec2_dict_blob(self):
"""Ensure non-JSON blob data is correctly converted."""
@@ -225,47 +216,49 @@ class CredentialTestCase(CredentialBaseTestCase):
list_creds = list_r.result['credentials']
list_ids = [r['id'] for r in list_creds]
self.assertIn(credential_id, list_ids)
+ # use json.loads to transform the blobs back into Python dictionaries
+ # to avoid problems with the keys being in different orders.
for r in list_creds:
if r['id'] == credential_id:
- self.assertEqual(expected_blob, r['blob'])
+ self.assertEqual(json.loads(expected_blob),
+ json.loads(r['blob']))
def test_create_non_ec2_credential(self):
- """Call ``POST /credentials`` for creating non-ec2 credential."""
- ref = self.new_credential_ref(user_id=self.user['id'])
- blob = {"access": uuid.uuid4().hex,
- "secret": uuid.uuid4().hex}
- ref['blob'] = json.dumps(blob)
- r = self.post(
- '/credentials',
- body={'credential': ref})
+ """Test creating non-ec2 credential.
+
+ Call ``POST /credentials``.
+ """
+ blob, ref = unit.new_cert_credential(user_id=self.user['id'])
+
+ r = self.post('/credentials', body={'credential': ref})
self.assertValidCredentialResponse(r, ref)
# Assert credential id is not same as hash of access key id for
# non-ec2 credentials
- self.assertNotEqual(r.result['credential']['id'],
- hashlib.sha256(blob['access']).hexdigest())
+ access = blob['access'].encode('utf-8')
+ self.assertNotEqual(hashlib.sha256(access).hexdigest(),
+ r.result['credential']['id'])
def test_create_ec2_credential_with_missing_project_id(self):
- """Call ``POST /credentials`` for creating ec2
- credential with missing project_id.
+ """Test creating ec2 credential with missing project_id.
+
+ Call ``POST /credentials``.
"""
- ref = self.new_credential_ref(user_id=self.user['id'])
- blob = {"access": uuid.uuid4().hex,
- "secret": uuid.uuid4().hex}
- ref['blob'] = json.dumps(blob)
- ref['type'] = 'ec2'
+ _, ref = unit.new_ec2_credential(user_id=self.user['id'],
+ project_id=None)
# Assert bad request status when missing project_id
self.post(
'/credentials',
body={'credential': ref}, expected_status=http_client.BAD_REQUEST)
def test_create_ec2_credential_with_invalid_blob(self):
- """Call ``POST /credentials`` for creating ec2
- credential with invalid blob.
+ """Test creating ec2 credential with invalid blob.
+
+ Call ``POST /credentials``.
"""
- ref = self.new_credential_ref(user_id=self.user['id'],
- project_id=self.project_id)
- ref['blob'] = '{"abc":"def"d}'
- ref['type'] = 'ec2'
+ ref = unit.new_credential_ref(user_id=self.user['id'],
+ project_id=self.project_id,
+ blob='{"abc":"def"d}',
+ type=CRED_TYPE_EC2)
# Assert bad request status when request contains invalid blob
response = self.post(
'/credentials',
@@ -274,20 +267,21 @@ class CredentialTestCase(CredentialBaseTestCase):
def test_create_credential_with_admin_token(self):
# Make sure we can create credential with the static admin token
- ref = self.new_credential_ref(user_id=self.user['id'])
+ ref = unit.new_credential_ref(user_id=self.user['id'])
r = self.post(
'/credentials',
body={'credential': ref},
- token=CONF.admin_token)
+ token=self.get_admin_token())
self.assertValidCredentialResponse(r, ref)
class TestCredentialTrustScoped(test_v3.RestfulTestCase):
"""Test credential with trust scoped token."""
+
def setUp(self):
super(TestCredentialTrustScoped, self).setUp()
- self.trustee_user = self.new_user_ref(domain_id=self.domain_id)
+ self.trustee_user = unit.new_user_ref(domain_id=self.domain_id)
password = self.trustee_user['password']
self.trustee_user = self.identity_api.create_user(self.trustee_user)
self.trustee_user['password'] = password
@@ -298,9 +292,12 @@ class TestCredentialTrustScoped(test_v3.RestfulTestCase):
self.config_fixture.config(group='trust', enabled=True)
def test_trust_scoped_ec2_credential(self):
- """Call ``POST /credentials`` for creating ec2 credential."""
+ """Test creating trust scoped ec2 credential.
+
+ Call ``POST /credentials``.
+ """
# Create the trust
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
trustee_user_id=self.trustee_user_id,
project_id=self.project_id,
@@ -316,22 +313,15 @@ class TestCredentialTrustScoped(test_v3.RestfulTestCase):
user_id=self.trustee_user['id'],
password=self.trustee_user['password'],
trust_id=trust['id'])
- r = self.v3_authenticate_token(auth_data)
- self.assertValidProjectTrustScopedTokenResponse(r, self.user)
+ r = self.v3_create_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(r, self.user)
trust_id = r.result['token']['OS-TRUST:trust']['id']
token_id = r.headers.get('X-Subject-Token')
# Create the credential with the trust scoped token
- ref = self.new_credential_ref(user_id=self.user['id'],
- project_id=self.project_id)
- blob = {"access": uuid.uuid4().hex,
- "secret": uuid.uuid4().hex}
- ref['blob'] = json.dumps(blob)
- ref['type'] = 'ec2'
- r = self.post(
- '/credentials',
- body={'credential': ref},
- token=token_id)
+ blob, ref = unit.new_ec2_credential(user_id=self.user['id'],
+ project_id=self.project_id)
+ r = self.post('/credentials', body={'credential': ref}, token=token_id)
# We expect the response blob to contain the trust_id
ret_ref = ref.copy()
@@ -342,8 +332,9 @@ class TestCredentialTrustScoped(test_v3.RestfulTestCase):
# Assert credential id is same as hash of access key id for
# ec2 credentials
- self.assertEqual(r.result['credential']['id'],
- hashlib.sha256(blob['access']).hexdigest())
+ access = blob['access'].encode('utf-8')
+ self.assertEqual(hashlib.sha256(access).hexdigest(),
+ r.result['credential']['id'])
# Create second ec2 credential with the same access key id and check
# for conflict.
@@ -351,11 +342,12 @@ class TestCredentialTrustScoped(test_v3.RestfulTestCase):
'/credentials',
body={'credential': ref},
token=token_id,
- expected_status=409)
+ expected_status=http_client.CONFLICT)
class TestCredentialEc2(CredentialBaseTestCase):
"""Test v3 credential compatibility with ec2tokens."""
+
def setUp(self):
super(TestCredentialEc2, self).setUp()
@@ -382,25 +374,19 @@ class TestCredentialEc2(CredentialBaseTestCase):
r = self.post(
'/ec2tokens',
body={'ec2Credentials': sig_ref},
- expected_status=200)
+ expected_status=http_client.OK)
self.assertValidTokenResponse(r)
def test_ec2_credential_signature_validate(self):
"""Test signature validation with a v3 ec2 credential."""
- ref = self.new_credential_ref(
- user_id=self.user['id'],
- project_id=self.project_id)
- blob = {"access": uuid.uuid4().hex,
- "secret": uuid.uuid4().hex}
- ref['blob'] = json.dumps(blob)
- ref['type'] = 'ec2'
- r = self.post(
- '/credentials',
- body={'credential': ref})
+ blob, ref = unit.new_ec2_credential(user_id=self.user['id'],
+ project_id=self.project_id)
+ r = self.post('/credentials', body={'credential': ref})
self.assertValidCredentialResponse(r, ref)
# Assert credential id is same as hash of access key id
- self.assertEqual(r.result['credential']['id'],
- hashlib.sha256(blob['access']).hexdigest())
+ access = blob['access'].encode('utf-8')
+ self.assertEqual(hashlib.sha256(access).hexdigest(),
+ r.result['credential']['id'])
cred_blob = json.loads(r.result['credential']['blob'])
self.assertEqual(blob, cred_blob)
@@ -409,7 +395,7 @@ class TestCredentialEc2(CredentialBaseTestCase):
def test_ec2_credential_signature_validate_legacy(self):
"""Test signature validation with a legacy v3 ec2 credential."""
- cred_json, credential_id = self._create_dict_blob_credential()
+ cred_json, _ = self._create_dict_blob_credential()
cred_blob = json.loads(cred_json)
self._validate_signature(access=cred_blob['access'],
secret=cred_blob['secret'])
@@ -442,6 +428,19 @@ class TestCredentialEc2(CredentialBaseTestCase):
self.assertThat(ec2_cred['links']['self'],
matchers.EndsWith(uri))
+ def test_ec2_cannot_get_non_ec2_credential(self):
+ access_key = uuid.uuid4().hex
+ cred_id = utils.hash_access_key(access_key)
+ non_ec2_cred = unit.new_credential_ref(
+ user_id=self.user_id,
+ project_id=self.project_id)
+ non_ec2_cred['id'] = cred_id
+ self.credential_api.create_credential(cred_id, non_ec2_cred)
+ uri = '/'.join([self._get_ec2_cred_uri(), access_key])
+ # if access_key is not found, ec2 controller raises Unauthorized
+ # exception
+ self.get(uri, expected_status=http_client.UNAUTHORIZED)
+
def test_ec2_list_credentials(self):
"""Test ec2 credential listing."""
self._get_ec2_cred()
@@ -452,13 +451,26 @@ class TestCredentialEc2(CredentialBaseTestCase):
self.assertThat(r.result['links']['self'],
matchers.EndsWith(uri))
+ # non-EC2 credentials won't be fetched
+ non_ec2_cred = unit.new_credential_ref(
+ user_id=self.user_id,
+ project_id=self.project_id)
+ non_ec2_cred['type'] = uuid.uuid4().hex
+ self.credential_api.create_credential(non_ec2_cred['id'],
+ non_ec2_cred)
+ r = self.get(uri)
+ cred_list_2 = r.result['credentials']
+ # still one element because non-EC2 credentials are not returned.
+ self.assertEqual(1, len(cred_list_2))
+ self.assertEqual(cred_list[0], cred_list_2[0])
+
def test_ec2_delete_credential(self):
"""Test ec2 credential deletion."""
ec2_cred = self._get_ec2_cred()
uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']])
cred_from_credential_api = (
self.credential_api
- .list_credentials_for_user(self.user_id))
+ .list_credentials_for_user(self.user_id, type=CRED_TYPE_EC2))
self.assertEqual(1, len(cred_from_credential_api))
self.delete(uri)
self.assertRaises(exception.CredentialNotFound,
diff --git a/keystone-moon/keystone/tests/unit/test_v3_domain_config.py b/keystone-moon/keystone/tests/unit/test_v3_domain_config.py
index 701cd3cf..ee716081 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_domain_config.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_domain_config.py
@@ -17,6 +17,7 @@ from oslo_config import cfg
from six.moves import http_client
from keystone import exception
+from keystone.tests import unit
from keystone.tests.unit import test_v3
@@ -29,7 +30,7 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
def setUp(self):
super(DomainConfigTestCase, self).setUp()
- self.domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ self.domain = unit.new_domain_ref()
self.resource_api.create_domain(self.domain['id'], self.domain)
self.config = {'ldap': {'url': uuid.uuid4().hex,
'user_tree_dn': uuid.uuid4().hex},
@@ -40,21 +41,34 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
url = '/domains/%(domain_id)s/config' % {
'domain_id': self.domain['id']}
r = self.put(url, body={'config': self.config},
- expected_status=201)
+ expected_status=http_client.CREATED)
res = self.domain_config_api.get_config(self.domain['id'])
self.assertEqual(self.config, r.result['config'])
self.assertEqual(self.config, res)
+ def test_create_config_invalid_domain(self):
+ """Call ``PUT /domains/{domain_id}/config``
+
+ While creating Identity API-based domain config with an invalid domain
+ id provided, the request shall be rejected with a response, 404 domain
+ not found.
+ """
+ invalid_domain_id = uuid.uuid4().hex
+ url = '/domains/%(domain_id)s/config' % {
+ 'domain_id': invalid_domain_id}
+ self.put(url, body={'config': self.config},
+ expected_status=exception.DomainNotFound.code)
+
def test_create_config_twice(self):
"""Check multiple creates don't throw error"""
self.put('/domains/%(domain_id)s/config' % {
'domain_id': self.domain['id']},
body={'config': self.config},
- expected_status=201)
+ expected_status=http_client.CREATED)
self.put('/domains/%(domain_id)s/config' % {
'domain_id': self.domain['id']},
body={'config': self.config},
- expected_status=200)
+ expected_status=http_client.OK)
def test_delete_config(self):
"""Call ``DELETE /domains{domain_id}/config``."""
@@ -65,6 +79,19 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
'domain_id': self.domain['id']},
expected_status=exception.DomainConfigNotFound.code)
+ def test_delete_config_invalid_domain(self):
+ """Call ``DELETE /domains{domain_id}/config``
+
+ While deleting Identity API-based domain config with an invalid domain
+ id provided, the request shall be rejected with a response, 404 domain
+ not found.
+ """
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ invalid_domain_id = uuid.uuid4().hex
+ self.delete('/domains/%(domain_id)s/config' % {
+ 'domain_id': invalid_domain_id},
+ expected_status=exception.DomainNotFound.code)
+
def test_delete_config_by_group(self):
"""Call ``DELETE /domains{domain_id}/config/{group}``."""
self.domain_config_api.create_config(self.domain['id'], self.config)
@@ -73,6 +100,19 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
res = self.domain_config_api.get_config(self.domain['id'])
self.assertNotIn('ldap', res)
+ def test_delete_config_by_group_invalid_domain(self):
+ """Call ``DELETE /domains{domain_id}/config/{group}``
+
+ While deleting Identity API-based domain config by group with an
+ invalid domain id provided, the request shall be rejected with a
+ response 404 domain not found.
+ """
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ invalid_domain_id = uuid.uuid4().hex
+ self.delete('/domains/%(domain_id)s/config/ldap' % {
+ 'domain_id': invalid_domain_id},
+ expected_status=exception.DomainNotFound.code)
+
def test_get_head_config(self):
"""Call ``GET & HEAD for /domains{domain_id}/config``."""
self.domain_config_api.create_config(self.domain['id'], self.config)
@@ -80,7 +120,7 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
'domain_id': self.domain['id']}
r = self.get(url)
self.assertEqual(self.config, r.result['config'])
- self.head(url, expected_status=200)
+ self.head(url, expected_status=http_client.OK)
def test_get_config_by_group(self):
"""Call ``GET & HEAD /domains{domain_id}/config/{group}``."""
@@ -89,7 +129,20 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
'domain_id': self.domain['id']}
r = self.get(url)
self.assertEqual({'ldap': self.config['ldap']}, r.result['config'])
- self.head(url, expected_status=200)
+ self.head(url, expected_status=http_client.OK)
+
+ def test_get_config_by_group_invalid_domain(self):
+ """Call ``GET & HEAD /domains{domain_id}/config/{group}``
+
+ While retrieving Identity API-based domain config by group with an
+ invalid domain id provided, the request shall be rejected with a
+ response 404 domain not found.
+ """
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ invalid_domain_id = uuid.uuid4().hex
+ self.get('/domains/%(domain_id)s/config/ldap' % {
+ 'domain_id': invalid_domain_id},
+ expected_status=exception.DomainNotFound.code)
def test_get_config_by_option(self):
"""Call ``GET & HEAD /domains{domain_id}/config/{group}/{option}``."""
@@ -99,7 +152,20 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
r = self.get(url)
self.assertEqual({'url': self.config['ldap']['url']},
r.result['config'])
- self.head(url, expected_status=200)
+ self.head(url, expected_status=http_client.OK)
+
+ def test_get_config_by_option_invalid_domain(self):
+ """Call ``GET & HEAD /domains{domain_id}/config/{group}/{option}``
+
+ While retrieving Identity API-based domain config by option with an
+ invalid domain id provided, the request shall be rejected with a
+ response 404 domain not found.
+ """
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ invalid_domain_id = uuid.uuid4().hex
+ self.get('/domains/%(domain_id)s/config/ldap/url' % {
+ 'domain_id': invalid_domain_id},
+ expected_status=exception.DomainNotFound.code)
def test_get_non_existant_config(self):
"""Call ``GET /domains{domain_id}/config when no config defined``."""
@@ -107,6 +173,18 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
'domain_id': self.domain['id']},
expected_status=http_client.NOT_FOUND)
+ def test_get_non_existant_config_invalid_domain(self):
+ """Call ``GET /domains{domain_id}/config when no config defined``
+
+ While retrieving non-existent Identity API-based domain config with an
+ invalid domain id provided, the request shall be rejected with a
+ response 404 domain not found.
+ """
+ invalid_domain_id = uuid.uuid4().hex
+ self.get('/domains/%(domain_id)s/config' % {
+ 'domain_id': invalid_domain_id},
+ expected_status=exception.DomainNotFound.code)
+
def test_get_non_existant_config_group(self):
"""Call ``GET /domains{domain_id}/config/{group_not_exist}``."""
config = {'ldap': {'url': uuid.uuid4().hex}}
@@ -115,6 +193,20 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
'domain_id': self.domain['id']},
expected_status=http_client.NOT_FOUND)
+ def test_get_non_existant_config_group_invalid_domain(self):
+ """Call ``GET /domains{domain_id}/config/{group_not_exist}``
+
+ While retrieving non-existent Identity API-based domain config group
+ with an invalid domain id provided, the request shall be rejected with
+ a response, 404 domain not found.
+ """
+ config = {'ldap': {'url': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ invalid_domain_id = uuid.uuid4().hex
+ self.get('/domains/%(domain_id)s/config/identity' % {
+ 'domain_id': invalid_domain_id},
+ expected_status=exception.DomainNotFound.code)
+
def test_get_non_existant_config_option(self):
"""Call ``GET /domains{domain_id}/config/group/{option_not_exist}``."""
config = {'ldap': {'url': uuid.uuid4().hex}}
@@ -123,6 +215,20 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
'domain_id': self.domain['id']},
expected_status=http_client.NOT_FOUND)
+ def test_get_non_existant_config_option_invalid_domain(self):
+ """Call ``GET /domains{domain_id}/config/group/{option_not_exist}``
+
+ While retrieving non-existent Identity API-based domain config option
+ with an invalid domain id provided, the request shall be rejected with
+ a response, 404 domain not found.
+ """
+ config = {'ldap': {'url': uuid.uuid4().hex}}
+ self.domain_config_api.create_config(self.domain['id'], config)
+ invalid_domain_id = uuid.uuid4().hex
+ self.get('/domains/%(domain_id)s/config/ldap/user_tree_dn' % {
+ 'domain_id': invalid_domain_id},
+ expected_status=exception.DomainNotFound.code)
+
def test_update_config(self):
"""Call ``PATCH /domains/{domain_id}/config``."""
self.domain_config_api.create_config(self.domain['id'], self.config)
@@ -139,6 +245,22 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
self.assertEqual(expected_config, r.result['config'])
self.assertEqual(expected_config, res)
+ def test_update_config_invalid_domain(self):
+ """Call ``PATCH /domains/{domain_id}/config``
+
+ While updating Identity API-based domain config with an invalid domain
+ id provided, the request shall be rejected with a response, 404 domain
+ not found.
+ """
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ new_config = {'ldap': {'url': uuid.uuid4().hex},
+ 'identity': {'driver': uuid.uuid4().hex}}
+ invalid_domain_id = uuid.uuid4().hex
+ self.patch('/domains/%(domain_id)s/config' % {
+ 'domain_id': invalid_domain_id},
+ body={'config': new_config},
+ expected_status=exception.DomainNotFound.code)
+
def test_update_config_group(self):
"""Call ``PATCH /domains/{domain_id}/config/{group}``."""
self.domain_config_api.create_config(self.domain['id'], self.config)
@@ -155,6 +277,22 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
self.assertEqual(expected_config, r.result['config'])
self.assertEqual(expected_config, res)
+ def test_update_config_group_invalid_domain(self):
+ """Call ``PATCH /domains/{domain_id}/config/{group}``
+
+ While updating Identity API-based domain config group with an invalid
+ domain id provided, the request shall be rejected with a response,
+ 404 domain not found.
+ """
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ new_config = {'ldap': {'url': uuid.uuid4().hex,
+ 'user_filter': uuid.uuid4().hex}}
+ invalid_domain_id = uuid.uuid4().hex
+ self.patch('/domains/%(domain_id)s/config/ldap' % {
+ 'domain_id': invalid_domain_id},
+ body={'config': new_config},
+ expected_status=exception.DomainNotFound.code)
+
def test_update_config_invalid_group(self):
"""Call ``PATCH /domains/{domain_id}/config/{invalid_group}``."""
self.domain_config_api.create_config(self.domain['id'], self.config)
@@ -178,6 +316,24 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
body={'config': new_config},
expected_status=http_client.NOT_FOUND)
+ def test_update_config_invalid_group_invalid_domain(self):
+ """Call ``PATCH /domains/{domain_id}/config/{invalid_group}``
+
+ While updating Identity API-based domain config with an invalid group
+ and an invalid domain id provided, the request shall be rejected
+ with a response, 404 domain not found.
+ """
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ invalid_group = uuid.uuid4().hex
+ new_config = {invalid_group: {'url': uuid.uuid4().hex,
+ 'user_filter': uuid.uuid4().hex}}
+ invalid_domain_id = uuid.uuid4().hex
+ self.patch('/domains/%(domain_id)s/config/%(invalid_group)s' % {
+ 'domain_id': invalid_domain_id,
+ 'invalid_group': invalid_group},
+ body={'config': new_config},
+ expected_status=exception.DomainNotFound.code)
+
def test_update_config_option(self):
"""Call ``PATCH /domains/{domain_id}/config/{group}/{option}``."""
self.domain_config_api.create_config(self.domain['id'], self.config)
@@ -191,6 +347,21 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
self.assertEqual(expected_config, r.result['config'])
self.assertEqual(expected_config, res)
+ def test_update_config_option_invalid_domain(self):
+ """Call ``PATCH /domains/{domain_id}/config/{group}/{option}``
+
+ While updating Identity API-based domain config option with an invalid
+ domain id provided, the request shall be rejected with a response, 404
+ domain not found.
+ """
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ new_config = {'url': uuid.uuid4().hex}
+ invalid_domain_id = uuid.uuid4().hex
+ self.patch('/domains/%(domain_id)s/config/ldap/url' % {
+ 'domain_id': invalid_domain_id},
+ body={'config': new_config},
+ expected_status=exception.DomainNotFound.code)
+
def test_update_config_invalid_option(self):
"""Call ``PATCH /domains/{domain_id}/config/{group}/{invalid}``."""
self.domain_config_api.create_config(self.domain['id'], self.config)
@@ -212,3 +383,77 @@ class DomainConfigTestCase(test_v3.RestfulTestCase):
'domain_id': self.domain['id']},
body={'config': new_config},
expected_status=http_client.NOT_FOUND)
+
+ def test_update_config_invalid_option_invalid_domain(self):
+ """Call ``PATCH /domains/{domain_id}/config/{group}/{invalid}``
+
+ While updating Identity API-based domain config with an invalid option
+ and an invalid domain id provided, the request shall be rejected
+ with a response, 404 domain not found.
+ """
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ invalid_option = uuid.uuid4().hex
+ new_config = {'ldap': {invalid_option: uuid.uuid4().hex}}
+ invalid_domain_id = uuid.uuid4().hex
+ self.patch(
+ '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' % {
+ 'domain_id': invalid_domain_id,
+ 'invalid_option': invalid_option},
+ body={'config': new_config},
+ expected_status=exception.DomainNotFound.code)
+
+ def test_get_config_default(self):
+ """Call ``GET /domains/config/default``."""
+ # Create a config that overrides a few of the options so that we can
+ # check that only the defaults are returned.
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ url = '/domains/config/default'
+ r = self.get(url)
+ default_config = r.result['config']
+ for group in default_config:
+ for option in default_config[group]:
+ self.assertEqual(getattr(getattr(CONF, group), option),
+ default_config[group][option])
+
+ def test_get_config_default_by_group(self):
+ """Call ``GET /domains/config/{group}/default``."""
+ # Create a config that overrides a few of the options so that we can
+ # check that only the defaults are returned.
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ url = '/domains/config/ldap/default'
+ r = self.get(url)
+ default_config = r.result['config']
+ for option in default_config['ldap']:
+ self.assertEqual(getattr(CONF.ldap, option),
+ default_config['ldap'][option])
+
+ def test_get_config_default_by_option(self):
+ """Call ``GET /domains/config/{group}/{option}/default``."""
+ # Create a config that overrides a few of the options so that we can
+ # check that only the defaults are returned.
+ self.domain_config_api.create_config(self.domain['id'], self.config)
+ url = '/domains/config/ldap/url/default'
+ r = self.get(url)
+ default_config = r.result['config']
+ self.assertEqual(CONF.ldap.url, default_config['url'])
+
+ def test_get_config_default_by_invalid_group(self):
+ """Call ``GET for /domains/config/{bad-group}/default``."""
+ # First try a valid group, but one we don't support for domain config
+ self.get('/domains/config/resource/default',
+ expected_status=http_client.FORBIDDEN)
+
+ # Now try a totally invalid group
+ url = '/domains/config/%s/default' % uuid.uuid4().hex
+ self.get(url, expected_status=http_client.FORBIDDEN)
+
+ def test_get_config_default_by_invalid_option(self):
+ """Call ``GET for /domains/config/{group}/{bad-option}/default``."""
+ # First try a valid option, but one we don't support for domain config,
+ # i.e. one that is in the sensitive options list
+ self.get('/domains/config/ldap/password/default',
+ expected_status=http_client.FORBIDDEN)
+
+ # Now try a totally invalid option
+ url = '/domains/config/ldap/%s/default' % uuid.uuid4().hex
+ self.get(url, expected_status=http_client.FORBIDDEN)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
index 3423d2d8..9fee8d2b 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py
@@ -15,6 +15,7 @@
from six.moves import http_client
from testtools import matchers
+from keystone.tests import unit
from keystone.tests.unit import test_v3
@@ -31,13 +32,15 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
def setUp(self):
super(EndpointPolicyTestCase, self).setUp()
- self.policy = self.new_policy_ref()
+ self.policy = unit.new_policy_ref()
self.policy_api.create_policy(self.policy['id'], self.policy)
- self.service = self.new_service_ref()
+ self.service = unit.new_service_ref()
self.catalog_api.create_service(self.service['id'], self.service)
- self.endpoint = self.new_endpoint_ref(self.service['id'], enabled=True)
+ self.endpoint = unit.new_endpoint_ref(self.service['id'], enabled=True,
+ interface='public',
+ region_id=self.region_id)
self.catalog_api.create_endpoint(self.endpoint['id'], self.endpoint)
- self.region = self.new_region_ref()
+ self.region = unit.new_region_ref()
self.catalog_api.create_region(self.region)
def assert_head_and_get_return_same_response(self, url, expected_status):
@@ -53,12 +56,14 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
url,
expected_status=http_client.NOT_FOUND)
- self.put(url, expected_status=204)
+ self.put(url)
# test that the new resource is accessible.
- self.assert_head_and_get_return_same_response(url, expected_status=204)
+ self.assert_head_and_get_return_same_response(
+ url,
+ expected_status=http_client.NO_CONTENT)
- self.delete(url, expected_status=204)
+ self.delete(url)
# test that the deleted resource is no longer accessible
self.assert_head_and_get_return_same_response(
@@ -67,7 +72,6 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
def test_crud_for_policy_for_explicit_endpoint(self):
"""PUT, HEAD and DELETE for explicit endpoint policy."""
-
url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
'/endpoints/%(endpoint_id)s') % {
'policy_id': self.policy['id'],
@@ -76,7 +80,6 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
def test_crud_for_policy_for_service(self):
"""PUT, HEAD and DELETE for service endpoint policy."""
-
url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
'/services/%(service_id)s') % {
'policy_id': self.policy['id'],
@@ -85,7 +88,6 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
def test_crud_for_policy_for_region_and_service(self):
"""PUT, HEAD and DELETE for region and service endpoint policy."""
-
url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
'/services/%(service_id)s/regions/%(region_id)s') % {
'policy_id': self.policy['id'],
@@ -95,37 +97,31 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
def test_get_policy_for_endpoint(self):
"""GET /endpoints/{endpoint_id}/policy."""
-
self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
'/endpoints/%(endpoint_id)s' % {
'policy_id': self.policy['id'],
- 'endpoint_id': self.endpoint['id']},
- expected_status=204)
+ 'endpoint_id': self.endpoint['id']})
self.head('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY'
'/policy' % {
'endpoint_id': self.endpoint['id']},
- expected_status=200)
+ expected_status=http_client.OK)
r = self.get('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY'
'/policy' % {
- 'endpoint_id': self.endpoint['id']},
- expected_status=200)
+ 'endpoint_id': self.endpoint['id']})
self.assertValidPolicyResponse(r, ref=self.policy)
def test_list_endpoints_for_policy(self):
"""GET /policies/%(policy_id}/endpoints."""
-
self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
'/endpoints/%(endpoint_id)s' % {
'policy_id': self.policy['id'],
- 'endpoint_id': self.endpoint['id']},
- expected_status=204)
+ 'endpoint_id': self.endpoint['id']})
r = self.get('/policies/%(policy_id)s/OS-ENDPOINT-POLICY'
'/endpoints' % {
- 'policy_id': self.policy['id']},
- expected_status=200)
+ 'policy_id': self.policy['id']})
self.assertValidEndpointListResponse(r, ref=self.endpoint)
self.assertThat(r.result.get('endpoints'), matchers.HasLength(1))
@@ -135,8 +131,8 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
'policy_id': self.policy['id'],
'endpoint_id': self.endpoint['id']}
- self.put(url, expected_status=204)
- self.head(url, expected_status=204)
+ self.put(url)
+ self.head(url)
self.delete('/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint['id']})
@@ -150,8 +146,8 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
'service_id': self.service['id'],
'region_id': self.region['id']}
- self.put(url, expected_status=204)
- self.head(url, expected_status=204)
+ self.put(url)
+ self.head(url)
self.delete('/regions/%(region_id)s' % {
'region_id': self.region['id']})
@@ -165,8 +161,8 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
'service_id': self.service['id'],
'region_id': self.region['id']}
- self.put(url, expected_status=204)
- self.head(url, expected_status=204)
+ self.put(url)
+ self.head(url)
self.delete('/services/%(service_id)s' % {
'service_id': self.service['id']})
@@ -179,8 +175,8 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
'policy_id': self.policy['id'],
'service_id': self.service['id']}
- self.put(url, expected_status=204)
- self.get(url, expected_status=204)
+ self.put(url)
+ self.get(url, expected_status=http_client.NO_CONTENT)
self.delete('/policies/%(policy_id)s' % {
'policy_id': self.policy['id']})
@@ -193,8 +189,8 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):
'policy_id': self.policy['id'],
'service_id': self.service['id']}
- self.put(url, expected_status=204)
- self.get(url, expected_status=204)
+ self.put(url)
+ self.get(url, expected_status=http_client.NO_CONTENT)
self.delete('/services/%(service_id)s' % {
'service_id': self.service['id']})
diff --git a/keystone-moon/keystone/tests/unit/test_v3_federation.py b/keystone-moon/keystone/tests/unit/test_v3_federation.py
index 4d7dcaab..f4ec8e51 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_federation.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_federation.py
@@ -10,6 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
+import copy
import os
import random
from testtools import matchers
@@ -19,7 +20,8 @@ import fixtures
from lxml import etree
import mock
from oslo_config import cfg
-from oslo_log import log
+from oslo_log import versionutils
+from oslo_serialization import jsonutils
from oslo_utils import importutils
from oslotest import mockpatch
import saml2
@@ -33,22 +35,24 @@ if not xmldsig:
from keystone.auth import controllers as auth_controllers
from keystone.common import environment
-from keystone.contrib.federation import controllers as federation_controllers
-from keystone.contrib.federation import idp as keystone_idp
+from keystone.contrib.federation import routers
from keystone import exception
+from keystone.federation import controllers as federation_controllers
+from keystone.federation import idp as keystone_idp
from keystone import notifications
+from keystone.tests import unit
from keystone.tests.unit import core
from keystone.tests.unit import federation_fixtures
from keystone.tests.unit import ksfixtures
from keystone.tests.unit import mapping_fixtures
from keystone.tests.unit import test_v3
+from keystone.tests.unit import utils
from keystone.token.providers import common as token_common
subprocess = environment.subprocess
CONF = cfg.CONF
-LOG = log.getLogger(__name__)
ROOTDIR = os.path.dirname(os.path.abspath(__file__))
XMLDIR = os.path.join(ROOTDIR, 'saml2/')
@@ -59,8 +63,12 @@ def dummy_validator(*args, **kwargs):
class FederationTests(test_v3.RestfulTestCase):
- EXTENSION_NAME = 'federation'
- EXTENSION_TO_ADD = 'federation_extension'
+ @mock.patch.object(versionutils, 'report_deprecated_feature')
+ def test_exception_happens(self, mock_deprecator):
+ routers.FederationExtension(mock.ANY)
+ mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("Remove federation_extension from", args[1])
class FederatedSetupMixin(object):
@@ -137,7 +145,6 @@ class FederatedSetupMixin(object):
def assertValidMappedUser(self, token):
"""Check if user object meets all the criteria."""
-
user = token['user']
self.assertIn('id', user)
self.assertIn('name', user)
@@ -209,66 +216,62 @@ class FederatedSetupMixin(object):
def load_federation_sample_data(self):
"""Inject additional data."""
-
# Create and add domains
- self.domainA = self.new_domain_ref()
+ self.domainA = unit.new_domain_ref()
self.resource_api.create_domain(self.domainA['id'],
self.domainA)
- self.domainB = self.new_domain_ref()
+ self.domainB = unit.new_domain_ref()
self.resource_api.create_domain(self.domainB['id'],
self.domainB)
- self.domainC = self.new_domain_ref()
+ self.domainC = unit.new_domain_ref()
self.resource_api.create_domain(self.domainC['id'],
self.domainC)
- self.domainD = self.new_domain_ref()
+ self.domainD = unit.new_domain_ref()
self.resource_api.create_domain(self.domainD['id'],
self.domainD)
# Create and add projects
- self.proj_employees = self.new_project_ref(
+ self.proj_employees = unit.new_project_ref(
domain_id=self.domainA['id'])
self.resource_api.create_project(self.proj_employees['id'],
self.proj_employees)
- self.proj_customers = self.new_project_ref(
+ self.proj_customers = unit.new_project_ref(
domain_id=self.domainA['id'])
self.resource_api.create_project(self.proj_customers['id'],
self.proj_customers)
- self.project_all = self.new_project_ref(
+ self.project_all = unit.new_project_ref(
domain_id=self.domainA['id'])
self.resource_api.create_project(self.project_all['id'],
self.project_all)
- self.project_inherited = self.new_project_ref(
+ self.project_inherited = unit.new_project_ref(
domain_id=self.domainD['id'])
self.resource_api.create_project(self.project_inherited['id'],
self.project_inherited)
# Create and add groups
- self.group_employees = self.new_group_ref(
- domain_id=self.domainA['id'])
+ self.group_employees = unit.new_group_ref(domain_id=self.domainA['id'])
self.group_employees = (
self.identity_api.create_group(self.group_employees))
- self.group_customers = self.new_group_ref(
- domain_id=self.domainA['id'])
+ self.group_customers = unit.new_group_ref(domain_id=self.domainA['id'])
self.group_customers = (
self.identity_api.create_group(self.group_customers))
- self.group_admins = self.new_group_ref(
- domain_id=self.domainA['id'])
+ self.group_admins = unit.new_group_ref(domain_id=self.domainA['id'])
self.group_admins = self.identity_api.create_group(self.group_admins)
# Create and add roles
- self.role_employee = self.new_role_ref()
+ self.role_employee = unit.new_role_ref()
self.role_api.create_role(self.role_employee['id'], self.role_employee)
- self.role_customer = self.new_role_ref()
+ self.role_customer = unit.new_role_ref()
self.role_api.create_role(self.role_customer['id'], self.role_customer)
- self.role_admin = self.new_role_ref()
+ self.role_admin = unit.new_role_ref()
self.role_api.create_role(self.role_admin['id'], self.role_admin)
# Employees can access
@@ -774,7 +777,7 @@ class FederatedSetupMixin(object):
self.domainC['id'])
-class FederatedIdentityProviderTests(FederationTests):
+class FederatedIdentityProviderTests(test_v3.RestfulTestCase):
"""A test class for Identity Providers."""
idp_keys = ['description', 'enabled']
@@ -815,7 +818,7 @@ class FederatedIdentityProviderTests(FederationTests):
if body is None:
body = self._http_idp_input()
resp = self.put(url, body={'identity_provider': body},
- expected_status=201)
+ expected_status=http_client.CREATED)
return resp
def _http_idp_input(self, **kwargs):
@@ -856,7 +859,6 @@ class FederatedIdentityProviderTests(FederationTests):
def test_create_idp(self):
"""Creates the IdentityProvider entity associated to remote_ids."""
-
keys_to_check = list(self.idp_keys)
body = self.default_body.copy()
body['description'] = uuid.uuid4().hex
@@ -867,7 +869,6 @@ class FederatedIdentityProviderTests(FederationTests):
def test_create_idp_remote(self):
"""Creates the IdentityProvider entity associated to remote_ids."""
-
keys_to_check = list(self.idp_keys)
keys_to_check.append('remote_ids')
body = self.default_body.copy()
@@ -886,10 +887,9 @@ class FederatedIdentityProviderTests(FederationTests):
A remote_id is the same for both so the second IdP is not
created because of the uniqueness of the remote_ids
- Expect HTTP 409 code for the latter call.
+ Expect HTTP 409 Conflict code for the latter call.
"""
-
body = self.default_body.copy()
repeated_remote_id = uuid.uuid4().hex
body['remote_ids'] = [uuid.uuid4().hex,
@@ -901,12 +901,15 @@ class FederatedIdentityProviderTests(FederationTests):
url = self.base_url(suffix=uuid.uuid4().hex)
body['remote_ids'] = [uuid.uuid4().hex,
repeated_remote_id]
- self.put(url, body={'identity_provider': body},
- expected_status=http_client.CONFLICT)
+ resp = self.put(url, body={'identity_provider': body},
+ expected_status=http_client.CONFLICT)
+
+ resp_data = jsonutils.loads(resp.body)
+ self.assertIn('Duplicate remote ID',
+ resp_data.get('error', {}).get('message'))
def test_create_idp_remote_empty(self):
"""Creates an IdP with empty remote_ids."""
-
keys_to_check = list(self.idp_keys)
keys_to_check.append('remote_ids')
body = self.default_body.copy()
@@ -919,7 +922,6 @@ class FederatedIdentityProviderTests(FederationTests):
def test_create_idp_remote_none(self):
"""Creates an IdP with a None remote_ids."""
-
keys_to_check = list(self.idp_keys)
keys_to_check.append('remote_ids')
body = self.default_body.copy()
@@ -986,6 +988,37 @@ class FederatedIdentityProviderTests(FederationTests):
self.assertEqual(sorted(body['remote_ids']),
sorted(returned_idp.get('remote_ids')))
+ def test_update_idp_remote_repeated(self):
+ """Update an IdentityProvider entity reusing a remote_id.
+
+ A remote_id is the same for both so the second IdP is not
+ updated because of the uniqueness of the remote_ids.
+
+ Expect HTTP 409 Conflict code for the latter call.
+
+ """
+ # Create first identity provider
+ body = self.default_body.copy()
+ repeated_remote_id = uuid.uuid4().hex
+ body['remote_ids'] = [uuid.uuid4().hex,
+ repeated_remote_id]
+ self._create_default_idp(body=body)
+
+ # Create second identity provider (without remote_ids)
+ body = self.default_body.copy()
+ default_resp = self._create_default_idp(body=body)
+ default_idp = self._fetch_attribute_from_response(default_resp,
+ 'identity_provider')
+ idp_id = default_idp.get('id')
+ url = self.base_url(suffix=idp_id)
+
+ body['remote_ids'] = [repeated_remote_id]
+ resp = self.patch(url, body={'identity_provider': body},
+ expected_status=http_client.CONFLICT)
+ resp_data = jsonutils.loads(resp.body)
+ self.assertIn('Duplicate remote ID',
+ resp_data['error']['message'])
+
def test_list_idps(self, iterations=5):
"""Lists all available IdentityProviders.
@@ -1018,18 +1051,73 @@ class FederatedIdentityProviderTests(FederationTests):
ids_intersection = entities_ids.intersection(ids)
self.assertEqual(ids_intersection, ids)
+ def test_filter_list_idp_by_id(self):
+ def get_id(resp):
+ r = self._fetch_attribute_from_response(resp,
+ 'identity_provider')
+ return r.get('id')
+
+ idp1_id = get_id(self._create_default_idp())
+ idp2_id = get_id(self._create_default_idp())
+
+ # list the IdP, should get two IdP.
+ url = self.base_url()
+ resp = self.get(url)
+ entities = self._fetch_attribute_from_response(resp,
+ 'identity_providers')
+ entities_ids = [e['id'] for e in entities]
+ self.assertItemsEqual(entities_ids, [idp1_id, idp2_id])
+
+ # filter the IdP by ID.
+ url = self.base_url() + '?id=' + idp1_id
+ resp = self.get(url)
+ filtered_service_list = resp.json['identity_providers']
+ self.assertThat(filtered_service_list, matchers.HasLength(1))
+ self.assertEqual(idp1_id, filtered_service_list[0].get('id'))
+
+ def test_filter_list_idp_by_enabled(self):
+ def get_id(resp):
+ r = self._fetch_attribute_from_response(resp,
+ 'identity_provider')
+ return r.get('id')
+
+ idp1_id = get_id(self._create_default_idp())
+
+ body = self.default_body.copy()
+ body['enabled'] = False
+ idp2_id = get_id(self._create_default_idp(body=body))
+
+ # list the IdP, should get two IdP.
+ url = self.base_url()
+ resp = self.get(url)
+ entities = self._fetch_attribute_from_response(resp,
+ 'identity_providers')
+ entities_ids = [e['id'] for e in entities]
+ self.assertItemsEqual(entities_ids, [idp1_id, idp2_id])
+
+ # filter the IdP by 'enabled'.
+ url = self.base_url() + '?enabled=True'
+ resp = self.get(url)
+ filtered_service_list = resp.json['identity_providers']
+ self.assertThat(filtered_service_list, matchers.HasLength(1))
+ self.assertEqual(idp1_id, filtered_service_list[0].get('id'))
+
def test_check_idp_uniqueness(self):
"""Add same IdP twice.
- Expect HTTP 409 code for the latter call.
+ Expect HTTP 409 Conflict code for the latter call.
"""
url = self.base_url(suffix=uuid.uuid4().hex)
body = self._http_idp_input()
self.put(url, body={'identity_provider': body},
- expected_status=201)
- self.put(url, body={'identity_provider': body},
- expected_status=http_client.CONFLICT)
+ expected_status=http_client.CREATED)
+ resp = self.put(url, body={'identity_provider': body},
+ expected_status=http_client.CONFLICT)
+
+ resp_data = jsonutils.loads(resp.body)
+ self.assertIn('Duplicate entry',
+ resp_data.get('error', {}).get('message'))
def test_get_idp(self):
"""Create and later fetch IdP."""
@@ -1047,7 +1135,7 @@ class FederatedIdentityProviderTests(FederationTests):
def test_get_nonexisting_idp(self):
"""Fetch nonexisting IdP entity.
- Expected HTTP 404 status code.
+ Expected HTTP 404 Not Found status code.
"""
idp_id = uuid.uuid4().hex
@@ -1059,7 +1147,7 @@ class FederatedIdentityProviderTests(FederationTests):
def test_delete_existing_idp(self):
"""Create and later delete IdP.
- Expect HTTP 404 for the GET IdP call.
+ Expect HTTP 404 Not Found for the GET IdP call.
"""
default_resp = self._create_default_idp()
default_idp = self._fetch_attribute_from_response(default_resp,
@@ -1072,7 +1160,6 @@ class FederatedIdentityProviderTests(FederationTests):
def test_delete_idp_also_deletes_assigned_protocols(self):
"""Deleting an IdP will delete its assigned protocol."""
-
# create default IdP
default_resp = self._create_default_idp()
default_idp = self._fetch_attribute_from_response(default_resp,
@@ -1084,7 +1171,7 @@ class FederatedIdentityProviderTests(FederationTests):
idp_url = self.base_url(suffix=idp_id)
# assign protocol to IdP
- kwargs = {'expected_status': 201}
+ kwargs = {'expected_status': http_client.CREATED}
resp, idp_id, proto = self._assign_protocol_to_idp(
url=url,
idp_id=idp_id,
@@ -1100,7 +1187,7 @@ class FederatedIdentityProviderTests(FederationTests):
def test_delete_nonexisting_idp(self):
"""Delete nonexisting IdP.
- Expect HTTP 404 for the GET IdP call.
+ Expect HTTP 404 Not Found for the GET IdP call.
"""
idp_id = uuid.uuid4().hex
url = self.base_url(suffix=idp_id)
@@ -1145,7 +1232,7 @@ class FederatedIdentityProviderTests(FederationTests):
def test_update_idp_immutable_attributes(self):
"""Update IdP's immutable parameters.
- Expect HTTP FORBIDDEN.
+ Expect HTTP BAD REQUEST.
"""
default_resp = self._create_default_idp()
@@ -1160,12 +1247,12 @@ class FederatedIdentityProviderTests(FederationTests):
url = self.base_url(suffix=idp_id)
self.patch(url, body={'identity_provider': body},
- expected_status=http_client.FORBIDDEN)
+ expected_status=http_client.BAD_REQUEST)
def test_update_nonexistent_idp(self):
"""Update nonexistent IdP
- Expect HTTP 404 code.
+ Expect HTTP 404 Not Found code.
"""
idp_id = uuid.uuid4().hex
@@ -1178,12 +1265,13 @@ class FederatedIdentityProviderTests(FederationTests):
def test_assign_protocol_to_idp(self):
"""Assign a protocol to existing IdP."""
-
- self._assign_protocol_to_idp(expected_status=201)
+ self._assign_protocol_to_idp(expected_status=http_client.CREATED)
def test_protocol_composite_pk(self):
- """Test whether Keystone let's add two entities with identical
- names, however attached to different IdPs.
+ """Test that Keystone can add two entities.
+
+ The entities have identical names, however, attached to different
+ IdPs.
1. Add IdP and assign it protocol with predefined name
2. Add another IdP and assign it a protocol with same name.
@@ -1193,7 +1281,7 @@ class FederatedIdentityProviderTests(FederationTests):
"""
url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
- kwargs = {'expected_status': 201}
+ kwargs = {'expected_status': http_client.CREATED}
self._assign_protocol_to_idp(proto='saml2',
url=url, **kwargs)
@@ -1204,12 +1292,12 @@ class FederatedIdentityProviderTests(FederationTests):
"""Test whether Keystone checks for unique idp/protocol values.
Add same protocol twice, expect Keystone to reject a latter call and
- return HTTP 409 code.
+ return HTTP 409 Conflict code.
"""
url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
- kwargs = {'expected_status': 201}
+ kwargs = {'expected_status': http_client.CREATED}
resp, idp_id, proto = self._assign_protocol_to_idp(proto='saml2',
url=url, **kwargs)
kwargs = {'expected_status': http_client.CONFLICT}
@@ -1221,10 +1309,9 @@ class FederatedIdentityProviderTests(FederationTests):
def test_assign_protocol_to_nonexistent_idp(self):
"""Assign protocol to IdP that doesn't exist.
- Expect HTTP 404 code.
+ Expect HTTP 404 Not Found code.
"""
-
idp_id = uuid.uuid4().hex
kwargs = {'expected_status': http_client.NOT_FOUND}
self._assign_protocol_to_idp(proto='saml2',
@@ -1234,8 +1321,8 @@ class FederatedIdentityProviderTests(FederationTests):
def test_get_protocol(self):
"""Create and later fetch protocol tied to IdP."""
-
- resp, idp_id, proto = self._assign_protocol_to_idp(expected_status=201)
+ resp, idp_id, proto = self._assign_protocol_to_idp(
+ expected_status=http_client.CREATED)
proto_id = self._fetch_attribute_from_response(resp, 'protocol')['id']
url = "%s/protocols/%s" % (idp_id, proto_id)
url = self.base_url(suffix=url)
@@ -1254,12 +1341,14 @@ class FederatedIdentityProviderTests(FederationTests):
Compare input and output id sets.
"""
- resp, idp_id, proto = self._assign_protocol_to_idp(expected_status=201)
+ resp, idp_id, proto = self._assign_protocol_to_idp(
+ expected_status=http_client.CREATED)
iterations = random.randint(0, 16)
protocol_ids = []
for _ in range(iterations):
- resp, _, proto = self._assign_protocol_to_idp(idp_id=idp_id,
- expected_status=201)
+ resp, _, proto = self._assign_protocol_to_idp(
+ idp_id=idp_id,
+ expected_status=http_client.CREATED)
proto_id = self._fetch_attribute_from_response(resp, 'protocol')
proto_id = proto_id['id']
protocol_ids.append(proto_id)
@@ -1277,8 +1366,8 @@ class FederatedIdentityProviderTests(FederationTests):
def test_update_protocols_attribute(self):
"""Update protocol's attribute."""
-
- resp, idp_id, proto = self._assign_protocol_to_idp(expected_status=201)
+ resp, idp_id, proto = self._assign_protocol_to_idp(
+ expected_status=http_client.CREATED)
new_mapping_id = uuid.uuid4().hex
url = "%s/protocols/%s" % (idp_id, proto)
@@ -1294,19 +1383,21 @@ class FederatedIdentityProviderTests(FederationTests):
def test_delete_protocol(self):
"""Delete protocol.
- Expect HTTP 404 code for the GET call after the protocol is deleted.
+ Expect HTTP 404 Not Found code for the GET call after the protocol is
+ deleted.
"""
url = self.base_url(suffix='/%(idp_id)s/'
'protocols/%(protocol_id)s')
- resp, idp_id, proto = self._assign_protocol_to_idp(expected_status=201)
+ resp, idp_id, proto = self._assign_protocol_to_idp(
+ expected_status=http_client.CREATED)
url = url % {'idp_id': idp_id,
'protocol_id': proto}
self.delete(url)
self.get(url, expected_status=http_client.NOT_FOUND)
-class MappingCRUDTests(FederationTests):
+class MappingCRUDTests(test_v3.RestfulTestCase):
"""A class for testing CRUD operations for Mappings."""
MAPPING_URL = '/OS-FEDERATION/mappings/'
@@ -1340,7 +1431,7 @@ class MappingCRUDTests(FederationTests):
url = self.MAPPING_URL + uuid.uuid4().hex
resp = self.put(url,
body={'mapping': mapping_fixtures.MAPPING_LARGE},
- expected_status=201)
+ expected_status=http_client.CREATED)
return resp
def _get_id_from_response(self, resp):
@@ -1357,7 +1448,7 @@ class MappingCRUDTests(FederationTests):
resp = self.get(url)
entities = resp.result.get('mappings')
self.assertIsNotNone(entities)
- self.assertResponseStatus(resp, 200)
+ self.assertResponseStatus(resp, http_client.OK)
self.assertValidListLinks(resp.result.get('links'))
self.assertEqual(1, len(entities))
@@ -1367,7 +1458,7 @@ class MappingCRUDTests(FederationTests):
mapping_id = self._get_id_from_response(resp)
url = url % {'mapping_id': str(mapping_id)}
resp = self.delete(url)
- self.assertResponseStatus(resp, 204)
+ self.assertResponseStatus(resp, http_client.NO_CONTENT)
self.get(url, expected_status=http_client.NOT_FOUND)
def test_mapping_get(self):
@@ -1463,8 +1554,8 @@ class MappingCRUDTests(FederationTests):
def test_create_mapping_with_blacklist_and_whitelist(self):
"""Test for adding whitelist and blacklist in the rule
- Server should respond with HTTP 400 error upon discovering both
- ``whitelist`` and ``blacklist`` keywords in the same rule.
+ Server should respond with HTTP 400 Bad Request error upon discovering
+ both ``whitelist`` and ``blacklist`` keywords in the same rule.
"""
url = self.MAPPING_URL + uuid.uuid4().hex
@@ -1472,8 +1563,37 @@ class MappingCRUDTests(FederationTests):
self.put(url, expected_status=http_client.BAD_REQUEST,
body={'mapping': mapping})
+ def test_create_mapping_with_local_user_and_local_domain(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ resp = self.put(
+ url,
+ body={
+ 'mapping': mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN
+ },
+ expected_status=http_client.CREATED)
+ self.assertValidMappingResponse(
+ resp, mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN)
+
+ def test_create_mapping_with_ephemeral(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ resp = self.put(
+ url,
+ body={'mapping': mapping_fixtures.MAPPING_EPHEMERAL_USER},
+ expected_status=http_client.CREATED)
+ self.assertValidMappingResponse(
+ resp, mapping_fixtures.MAPPING_EPHEMERAL_USER)
+
+ def test_create_mapping_with_bad_user_type(self):
+ url = self.MAPPING_URL + uuid.uuid4().hex
+ # get a copy of a known good map
+ bad_mapping = copy.deepcopy(mapping_fixtures.MAPPING_EPHEMERAL_USER)
+ # now sabotage the user type
+ bad_mapping['rules'][0]['local'][0]['user']['type'] = uuid.uuid4().hex
+ self.put(url, expected_status=http_client.BAD_REQUEST,
+ body={'mapping': bad_mapping})
+
-class FederatedTokenTests(FederationTests, FederatedSetupMixin):
+class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin):
def auth_plugin_config_override(self):
methods = ['saml2']
@@ -1510,7 +1630,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
self.assertTrue(note['send_notification_called'])
def load_fixtures(self, fixtures):
- super(FederationTests, self).load_fixtures(fixtures)
+ super(FederatedTokenTests, self).load_fixtures(fixtures)
self.load_federation_sample_data()
def test_issue_unscoped_token_notify(self):
@@ -1609,7 +1729,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
def test_issue_unscoped_token_with_remote_unavailable(self):
self.config_fixture.config(group='federation',
remote_id_attribute=self.REMOTE_ID_ATTR)
- self.assertRaises(exception.ValidationError,
+ self.assertRaises(exception.Unauthorized,
self._issue_unscoped_token,
idp=self.IDP_WITH_REMOTE,
environment={
@@ -1649,13 +1769,13 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
self.assertIsNotNone(r.headers.get('X-Subject-Token'))
def test_scope_to_project_once_notify(self):
- r = self.v3_authenticate_token(
+ r = self.v3_create_token(
self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE)
user_id = r.json['token']['user']['id']
self._assert_last_notify(self.ACTION, self.IDP, self.PROTOCOL, user_id)
def test_scope_to_project_once(self):
- r = self.v3_authenticate_token(
+ r = self.v3_create_token(
self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE)
token_resp = r.result['token']
project_id = token_resp['project']['id']
@@ -1685,14 +1805,13 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
"""
enabled_false = {'enabled': False}
self.federation_api.update_idp(self.IDP, enabled_false)
- self.v3_authenticate_token(
+ self.v3_create_token(
self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER,
expected_status=http_client.FORBIDDEN)
def test_scope_to_bad_project(self):
"""Scope unscoped token with a project we don't have access to."""
-
- self.v3_authenticate_token(
+ self.v3_create_token(
self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER,
expected_status=http_client.UNAUTHORIZED)
@@ -1705,13 +1824,12 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
* Employees' project
"""
-
bodies = (self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN,
self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN)
project_ids = (self.proj_employees['id'],
self.proj_customers['id'])
for body, project_id_ref in zip(bodies, project_ids):
- r = self.v3_authenticate_token(body)
+ r = self.v3_create_token(body)
token_resp = r.result['token']
self._check_project_scoped_token_attributes(token_resp,
project_id_ref)
@@ -1719,7 +1837,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
def test_scope_to_project_with_only_inherited_roles(self):
"""Try to scope token whose only roles are inherited."""
self.config_fixture.config(group='os_inherit', enabled=True)
- r = self.v3_authenticate_token(
+ r = self.v3_create_token(
self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER)
token_resp = r.result['token']
self._check_project_scoped_token_attributes(
@@ -1731,7 +1849,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
def test_scope_token_from_nonexistent_unscoped_token(self):
"""Try to scope token from non-existent unscoped token."""
- self.v3_authenticate_token(
+ self.v3_create_token(
self.TOKEN_SCOPE_PROJECT_FROM_NONEXISTENT_TOKEN,
expected_status=http_client.NOT_FOUND)
@@ -1755,7 +1873,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
assertion='CONTRACTOR_ASSERTION')
def test_scope_to_domain_once(self):
- r = self.v3_authenticate_token(self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER)
+ r = self.v3_create_token(self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER)
token_resp = r.result['token']
self._check_domain_scoped_token_attributes(token_resp,
self.domainA['id'])
@@ -1778,14 +1896,14 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
self.domainC['id'])
for body, domain_id_ref in zip(bodies, domain_ids):
- r = self.v3_authenticate_token(body)
+ r = self.v3_create_token(body)
token_resp = r.result['token']
self._check_domain_scoped_token_attributes(token_resp,
domain_id_ref)
def test_scope_to_domain_with_only_inherited_roles_fails(self):
"""Try to scope to a domain that has no direct roles."""
- self.v3_authenticate_token(
+ self.v3_create_token(
self.TOKEN_SCOPE_DOMAIN_D_FROM_CUSTOMER,
expected_status=http_client.UNAUTHORIZED)
@@ -1816,14 +1934,14 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
# TODO(samueldmq): Create another test class for role inheritance tests.
# The advantage would be to reduce the complexity of this test class and
- # have tests specific to this fuctionality grouped, easing readability and
+ # have tests specific to this functionality grouped, easing readability and
# maintenability.
def test_list_projects_for_inherited_project_assignment(self):
# Enable os_inherit extension
self.config_fixture.config(group='os_inherit', enabled=True)
# Create a subproject
- subproject_inherited = self.new_project_ref(
+ subproject_inherited = unit.new_project_ref(
domain_id=self.domainD['id'],
parent_id=self.project_inherited['id'])
self.resource_api.create_project(subproject_inherited['id'],
@@ -1878,6 +1996,9 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
self.assertEqual(domains_ref, domains,
'match failed for url %s' % url)
+ @utils.wip('This will fail because of bug #1501032. The returned method'
+ 'list should contain "saml2". This is documented in bug '
+ '1501032.')
def test_full_workflow(self):
"""Test 'standard' workflow for granting access tokens.
@@ -1886,9 +2007,10 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
* Scope token to one of available projects
"""
-
r = self._issue_unscoped_token()
token_resp = r.json_body['token']
+ # NOTE(lbragstad): Ensure only 'saml2' is in the method list.
+ self.assertListEqual(['saml2'], token_resp['methods'])
self.assertValidMappedUser(token_resp)
employee_unscoped_token_id = r.headers.get('X-Subject-Token')
r = self.get('/auth/projects', token=employee_unscoped_token_id)
@@ -1899,8 +2021,12 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
v3_scope_request = self._scope_request(employee_unscoped_token_id,
'project', project['id'])
- r = self.v3_authenticate_token(v3_scope_request)
+ r = self.v3_create_token(v3_scope_request)
token_resp = r.result['token']
+ # FIXME(lbragstad): 'token' should be in the list of methods returned
+ # but it isn't. This is documented in bug 1501032.
+ self.assertIn('token', token_resp['methods'])
+ self.assertIn('saml2', token_resp['methods'])
self._check_project_scoped_token_attributes(token_resp, project['id'])
def test_workflow_with_groups_deletion(self):
@@ -1917,10 +2043,9 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
"""
# create group and role
- group = self.new_group_ref(
- domain_id=self.domainA['id'])
+ group = unit.new_group_ref(domain_id=self.domainA['id'])
group = self.identity_api.create_group(group)
- role = self.new_role_ref()
+ role = unit.new_role_ref()
self.role_api.create_role(role['id'], role)
# assign role to group and project_admins
@@ -1971,7 +2096,8 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
token_id, 'project',
self.project_all['id'])
- self.v3_authenticate_token(scoped_token, expected_status=500)
+ self.v3_create_token(
+ scoped_token, expected_status=http_client.INTERNAL_SERVER_ERROR)
def test_lists_with_missing_group_in_backend(self):
"""Test a mapping that points to a group that does not exist
@@ -1990,8 +2116,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
"""
domain_id = self.domainA['id']
domain_name = self.domainA['name']
- group = self.new_group_ref(domain_id=domain_id)
- group['name'] = 'EXISTS'
+ group = unit.new_group_ref(domain_id=domain_id, name='EXISTS')
group = self.identity_api.create_group(group)
rules = {
'rules': [
@@ -2047,18 +2172,16 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
assigned
"""
-
domain_id = self.domainA['id']
domain_name = self.domainA['name']
# Add a group "EXISTS"
- group_exists = self.new_group_ref(domain_id=domain_id)
- group_exists['name'] = 'EXISTS'
+ group_exists = unit.new_group_ref(domain_id=domain_id, name='EXISTS')
group_exists = self.identity_api.create_group(group_exists)
# Add a group "NO_EXISTS"
- group_no_exists = self.new_group_ref(domain_id=domain_id)
- group_no_exists['name'] = 'NO_EXISTS'
+ group_no_exists = unit.new_group_ref(domain_id=domain_id,
+ name='NO_EXISTS')
group_no_exists = self.identity_api.create_group(group_no_exists)
group_ids = set([group_exists['id'], group_no_exists['id']])
@@ -2122,18 +2245,17 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
assigned
"""
-
domain_id = self.domainA['id']
domain_name = self.domainA['name']
# Add a group "EXISTS"
- group_exists = self.new_group_ref(domain_id=domain_id)
- group_exists['name'] = 'EXISTS'
+ group_exists = unit.new_group_ref(domain_id=domain_id,
+ name='EXISTS')
group_exists = self.identity_api.create_group(group_exists)
# Add a group "NO_EXISTS"
- group_no_exists = self.new_group_ref(domain_id=domain_id)
- group_no_exists['name'] = 'NO_EXISTS'
+ group_no_exists = unit.new_group_ref(domain_id=domain_id,
+ name='NO_EXISTS')
group_no_exists = self.identity_api.create_group(group_no_exists)
group_ids = set([group_exists['id'], group_no_exists['id']])
@@ -2198,8 +2320,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
"""
domain_id = self.domainA['id']
domain_name = self.domainA['name']
- group = self.new_group_ref(domain_id=domain_id)
- group['name'] = 'EXISTS'
+ group = unit.new_group_ref(domain_id=domain_id, name='EXISTS')
group = self.identity_api.create_group(group)
rules = {
'rules': [
@@ -2262,13 +2383,13 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
domain_name = self.domainA['name']
# Add a group "EXISTS"
- group_exists = self.new_group_ref(domain_id=domain_id)
- group_exists['name'] = 'EXISTS'
+ group_exists = unit.new_group_ref(domain_id=domain_id,
+ name='EXISTS')
group_exists = self.identity_api.create_group(group_exists)
# Add a group "NO_EXISTS"
- group_no_exists = self.new_group_ref(domain_id=domain_id)
- group_no_exists['name'] = 'NO_EXISTS'
+ group_no_exists = unit.new_group_ref(domain_id=domain_id,
+ name='NO_EXISTS')
group_no_exists = self.identity_api.create_group(group_no_exists)
group_ids = set([group_exists['id'], group_no_exists['id']])
@@ -2362,7 +2483,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
self._check_domains_are_valid(r.json_body['token'])
def test_scoped_token_has_user_domain(self):
- r = self.v3_authenticate_token(
+ r = self.v3_create_token(
self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE)
self._check_domains_are_valid(r.result['token'])
@@ -2383,7 +2504,7 @@ class FederatedTokenTests(FederationTests, FederatedSetupMixin):
assertion='ANOTHER_LOCAL_USER_ASSERTION')
-class FernetFederatedTokenTests(FederationTests, FederatedSetupMixin):
+class FernetFederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin):
AUTH_METHOD = 'token'
def load_fixtures(self, fixtures):
@@ -2436,7 +2557,7 @@ class FernetFederatedTokenTests(FederationTests, FederatedSetupMixin):
v3_scope_request = self._scope_request(unscoped_token,
'project', project['id'])
- resp = self.v3_authenticate_token(v3_scope_request)
+ resp = self.v3_create_token(v3_scope_request)
token_resp = resp.result['token']
self._check_project_scoped_token_attributes(token_resp, project['id'])
@@ -2448,6 +2569,7 @@ class FederatedTokenTestsMethodToken(FederatedTokenTests):
way for scoping all the tokens.
"""
+
AUTH_METHOD = 'token'
def auth_plugin_config_override(self):
@@ -2455,8 +2577,67 @@ class FederatedTokenTestsMethodToken(FederatedTokenTests):
super(FederatedTokenTests,
self).auth_plugin_config_override(methods)
+ @utils.wip('This will fail because of bug #1501032. The returned method'
+ 'list should contain "saml2". This is documented in bug '
+ '1501032.')
+ def test_full_workflow(self):
+ """Test 'standard' workflow for granting access tokens.
+
+ * Issue unscoped token
+ * List available projects based on groups
+ * Scope token to one of available projects
+
+ """
+ r = self._issue_unscoped_token()
+ token_resp = r.json_body['token']
+ # NOTE(lbragstad): Ensure only 'saml2' is in the method list.
+ self.assertListEqual(['saml2'], token_resp['methods'])
+ self.assertValidMappedUser(token_resp)
+ employee_unscoped_token_id = r.headers.get('X-Subject-Token')
+ r = self.get('/auth/projects', token=employee_unscoped_token_id)
+ projects = r.result['projects']
+ random_project = random.randint(0, len(projects)) - 1
+ project = projects[random_project]
+
+ v3_scope_request = self._scope_request(employee_unscoped_token_id,
+ 'project', project['id'])
+
+ r = self.v3_authenticate_token(v3_scope_request)
+ token_resp = r.result['token']
+ self.assertIn('token', token_resp['methods'])
+ self.assertIn('saml2', token_resp['methods'])
+ self._check_project_scoped_token_attributes(token_resp, project['id'])
+
+
+class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin):
+ """Tests for federated users
+
+ Tests new shadow users functionality
+
+ """
+
+ def auth_plugin_config_override(self):
+ methods = ['saml2']
+ super(FederatedUserTests, self).auth_plugin_config_override(methods)
+
+ def setUp(self):
+ super(FederatedUserTests, self).setUp()
+
+ def load_fixtures(self, fixtures):
+ super(FederatedUserTests, self).load_fixtures(fixtures)
+ self.load_federation_sample_data()
+
+ def test_user_id_persistense(self):
+ """Ensure user_id is persistend for multiple federated authn calls."""
+ r = self._issue_unscoped_token()
+ user_id = r.json_body['token']['user']['id']
-class JsonHomeTests(FederationTests, test_v3.JsonHomeTestMixin):
+ r = self._issue_unscoped_token()
+ user_id2 = r.json_body['token']['user']['id']
+ self.assertEqual(user_id, user_id2)
+
+
+class JsonHomeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
JSON_HOME_DATA = {
'http://docs.openstack.org/api/openstack-identity/3/ext/OS-FEDERATION/'
'1.0/rel/identity_provider': {
@@ -2484,7 +2665,7 @@ def _load_xml(filename):
return xml.read()
-class SAMLGenerationTests(FederationTests):
+class SAMLGenerationTests(test_v3.RestfulTestCase):
SP_AUTH_URL = ('http://beta.com:5000/v3/OS-FEDERATION/identity_providers'
'/BETA/protocols/saml2/auth')
@@ -2523,7 +2704,7 @@ class SAMLGenerationTests(FederationTests):
self.sp = self.sp_ref()
url = '/OS-FEDERATION/service_providers/' + self.SERVICE_PROVDIER_ID
self.put(url, body={'service_provider': self.sp},
- expected_status=201)
+ expected_status=http_client.CREATED)
def test_samlize_token_values(self):
"""Test the SAML generator produces a SAML object.
@@ -2665,7 +2846,7 @@ class SAMLGenerationTests(FederationTests):
"""
if not _is_xmlsec1_installed():
- self.skip('xmlsec1 is not installed')
+ self.skipTest('xmlsec1 is not installed')
generator = keystone_idp.SAMLGenerator()
response = generator.samlize_token(self.ISSUER, self.RECIPIENT,
@@ -2709,7 +2890,7 @@ class SAMLGenerationTests(FederationTests):
user_id=self.user['id'],
password=self.user['password'],
project_id=self.project['id'])
- resp = self.v3_authenticate_token(auth_data)
+ resp = self.v3_create_token(auth_data)
token_id = resp.headers.get('X-Subject-Token')
return token_id
@@ -2718,7 +2899,7 @@ class SAMLGenerationTests(FederationTests):
user_id=self.user['id'],
password=self.user['password'],
user_domain_id=self.domain['id'])
- resp = self.v3_authenticate_token(auth_data)
+ resp = self.v3_create_token(auth_data)
token_id = resp.headers.get('X-Subject-Token')
return token_id
@@ -2757,7 +2938,7 @@ class SAMLGenerationTests(FederationTests):
return_value=self.signed_assertion):
http_response = self.post(self.SAML_GENERATION_ROUTE, body=body,
response_content_type='text/xml',
- expected_status=200)
+ expected_status=http_client.OK)
response = etree.fromstring(http_response.result)
issuer = response[0]
@@ -2789,10 +2970,9 @@ class SAMLGenerationTests(FederationTests):
def test_invalid_scope_body(self):
"""Test that missing the scope in request body raises an exception.
- Raises exception.SchemaValidationError() - error code 400
+ Raises exception.SchemaValidationError() - error 400 Bad Request
"""
-
token_id = uuid.uuid4().hex
body = self._create_generate_saml_request(token_id,
self.SERVICE_PROVDIER_ID)
@@ -2804,10 +2984,9 @@ class SAMLGenerationTests(FederationTests):
def test_invalid_token_body(self):
"""Test that missing the token in request body raises an exception.
- Raises exception.SchemaValidationError() - error code 400
+ Raises exception.SchemaValidationError() - error 400 Bad Request
"""
-
token_id = uuid.uuid4().hex
body = self._create_generate_saml_request(token_id,
self.SERVICE_PROVDIER_ID)
@@ -2819,7 +2998,7 @@ class SAMLGenerationTests(FederationTests):
def test_sp_not_found(self):
"""Test SAML generation with an invalid service provider ID.
- Raises exception.ServiceProviderNotFound() - error code 404
+    Raises exception.ServiceProviderNotFound() - error 404 Not Found
"""
sp_id = uuid.uuid4().hex
@@ -2830,7 +3009,6 @@ class SAMLGenerationTests(FederationTests):
def test_sp_disabled(self):
"""Try generating assertion for disabled Service Provider."""
-
# Disable Service Provider
sp_ref = {'enabled': False}
self.federation_api.update_sp(self.SERVICE_PROVDIER_ID, sp_ref)
@@ -2844,10 +3022,9 @@ class SAMLGenerationTests(FederationTests):
def test_token_not_found(self):
"""Test that an invalid token in the request body raises an exception.
- Raises exception.TokenNotFound() - error code 404
+    Raises exception.TokenNotFound() - error 404 Not Found
"""
-
token_id = uuid.uuid4().hex
body = self._create_generate_saml_request(token_id,
self.SERVICE_PROVDIER_ID)
@@ -2863,7 +3040,6 @@ class SAMLGenerationTests(FederationTests):
The controller should return a SAML assertion that is wrapped in a
SOAP envelope.
"""
-
self.config_fixture.config(group='saml', idp_entity_id=self.ISSUER)
token_id = self._fetch_valid_token()
body = self._create_generate_saml_request(token_id,
@@ -2873,7 +3049,7 @@ class SAMLGenerationTests(FederationTests):
return_value=self.signed_assertion):
http_response = self.post(self.ECP_GENERATION_ROUTE, body=body,
response_content_type='text/xml',
- expected_status=200)
+ expected_status=http_client.OK)
env_response = etree.fromstring(http_response.result)
header = env_response[0]
@@ -2956,7 +3132,7 @@ class SAMLGenerationTests(FederationTests):
self.assertEqual(expected_log, logger_fixture.output)
-class IdPMetadataGenerationTests(FederationTests):
+class IdPMetadataGenerationTests(test_v3.RestfulTestCase):
"""A class for testing Identity Provider Metadata generation."""
METADATA_URL = '/OS-FEDERATION/saml2/metadata'
@@ -3073,20 +3249,20 @@ class IdPMetadataGenerationTests(FederationTests):
self.generator.generate_metadata)
def test_get_metadata_with_no_metadata_file_configured(self):
- self.get(self.METADATA_URL, expected_status=500)
+ self.get(self.METADATA_URL,
+ expected_status=http_client.INTERNAL_SERVER_ERROR)
def test_get_metadata(self):
self.config_fixture.config(
group='saml', idp_metadata_path=XMLDIR + '/idp_saml2_metadata.xml')
- r = self.get(self.METADATA_URL, response_content_type='text/xml',
- expected_status=200)
+ r = self.get(self.METADATA_URL, response_content_type='text/xml')
self.assertEqual('text/xml', r.headers.get('Content-Type'))
reference_file = _load_xml('idp_saml2_metadata.xml')
self.assertEqual(reference_file, r.result)
-class ServiceProviderTests(FederationTests):
+class ServiceProviderTests(test_v3.RestfulTestCase):
"""A test class for Service Providers."""
MEMBER_NAME = 'service_provider'
@@ -3096,13 +3272,13 @@ class ServiceProviderTests(FederationTests):
'relay_state_prefix', 'sp_url']
def setUp(self):
- super(FederationTests, self).setUp()
+ super(ServiceProviderTests, self).setUp()
# Add a Service Provider
url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
self.SP_REF = self.sp_ref()
self.SERVICE_PROVIDER = self.put(
url, body={'service_provider': self.SP_REF},
- expected_status=201).result
+ expected_status=http_client.CREATED).result
def sp_ref(self):
ref = {
@@ -3119,9 +3295,18 @@ class ServiceProviderTests(FederationTests):
return '/OS-FEDERATION/service_providers/' + str(suffix)
return '/OS-FEDERATION/service_providers'
+ def _create_default_sp(self, body=None):
+ """Create default Service Provider."""
+ url = self.base_url(suffix=uuid.uuid4().hex)
+ if body is None:
+ body = self.sp_ref()
+ resp = self.put(url, body={'service_provider': body},
+ expected_status=http_client.CREATED)
+ return resp
+
def test_get_service_provider(self):
url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- resp = self.get(url, expected_status=200)
+ resp = self.get(url)
self.assertValidEntity(resp.result['service_provider'],
keys_to_check=self.SP_KEYS)
@@ -3133,7 +3318,7 @@ class ServiceProviderTests(FederationTests):
url = self.base_url(suffix=uuid.uuid4().hex)
sp = self.sp_ref()
resp = self.put(url, body={'service_provider': sp},
- expected_status=201)
+ expected_status=http_client.CREATED)
self.assertValidEntity(resp.result['service_provider'],
keys_to_check=self.SP_KEYS)
@@ -3143,7 +3328,7 @@ class ServiceProviderTests(FederationTests):
sp = self.sp_ref()
del sp['relay_state_prefix']
resp = self.put(url, body={'service_provider': sp},
- expected_status=201)
+ expected_status=http_client.CREATED)
sp_result = resp.result['service_provider']
self.assertEqual(CONF.saml.relay_state_prefix,
sp_result['relay_state_prefix'])
@@ -3155,7 +3340,7 @@ class ServiceProviderTests(FederationTests):
non_default_prefix = uuid.uuid4().hex
sp['relay_state_prefix'] = non_default_prefix
resp = self.put(url, body={'service_provider': sp},
- expected_status=201)
+ expected_status=http_client.CREATED)
sp_result = resp.result['service_provider']
self.assertEqual(non_default_prefix,
sp_result['relay_state_prefix'])
@@ -3182,7 +3367,8 @@ class ServiceProviderTests(FederationTests):
}
for id, sp in ref_service_providers.items():
url = self.base_url(suffix=id)
- self.put(url, body={'service_provider': sp}, expected_status=201)
+ self.put(url, body={'service_provider': sp},
+ expected_status=http_client.CREATED)
# Insert ids into service provider object, we will compare it with
# responses from server and those include 'id' attribute.
@@ -3209,15 +3395,14 @@ class ServiceProviderTests(FederationTests):
"""
new_sp_ref = self.sp_ref()
url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- resp = self.patch(url, body={'service_provider': new_sp_ref},
- expected_status=200)
+ resp = self.patch(url, body={'service_provider': new_sp_ref})
patch_result = resp.result
new_sp_ref['id'] = self.SERVICE_PROVIDER_ID
self.assertValidEntity(patch_result['service_provider'],
ref=new_sp_ref,
keys_to_check=self.SP_KEYS)
- resp = self.get(url, expected_status=200)
+ resp = self.get(url)
get_result = resp.result
self.assertDictEqual(patch_result['service_provider'],
@@ -3227,7 +3412,7 @@ class ServiceProviderTests(FederationTests):
"""Update immutable attributes in service provider.
In this particular case the test will try to change ``id`` attribute.
- The server should return an HTTP 403 error code.
+ The server should return an HTTP 403 Forbidden error code.
"""
new_sp_ref = {'id': uuid.uuid4().hex}
@@ -3242,7 +3427,7 @@ class ServiceProviderTests(FederationTests):
self.patch(url, body={'service_provider': new_sp_ref},
expected_status=http_client.BAD_REQUEST)
- def test_update_service_provider_404(self):
+ def test_update_service_provider_returns_not_found(self):
new_sp_ref = self.sp_ref()
new_sp_ref['description'] = uuid.uuid4().hex
url = self.base_url(suffix=uuid.uuid4().hex)
@@ -3250,25 +3435,74 @@ class ServiceProviderTests(FederationTests):
expected_status=http_client.NOT_FOUND)
def test_update_sp_relay_state(self):
- """Update an SP with custome relay state."""
+ """Update an SP with custom relay state."""
new_sp_ref = self.sp_ref()
non_default_prefix = uuid.uuid4().hex
new_sp_ref['relay_state_prefix'] = non_default_prefix
url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- resp = self.patch(url, body={'service_provider': new_sp_ref},
- expected_status=200)
+ resp = self.patch(url, body={'service_provider': new_sp_ref})
sp_result = resp.result['service_provider']
self.assertEqual(non_default_prefix,
sp_result['relay_state_prefix'])
def test_delete_service_provider(self):
url = self.base_url(suffix=self.SERVICE_PROVIDER_ID)
- self.delete(url, expected_status=204)
+ self.delete(url)
- def test_delete_service_provider_404(self):
+ def test_delete_service_provider_returns_not_found(self):
url = self.base_url(suffix=uuid.uuid4().hex)
self.delete(url, expected_status=http_client.NOT_FOUND)
+ def test_filter_list_sp_by_id(self):
+ def get_id(resp):
+ sp = resp.result.get('service_provider')
+ return sp.get('id')
+
+ sp1_id = get_id(self._create_default_sp())
+ sp2_id = get_id(self._create_default_sp())
+
+        # list the SPs; both created SPs should be present.
+ url = self.base_url()
+ resp = self.get(url)
+ sps = resp.result.get('service_providers')
+ entities_ids = [e['id'] for e in sps]
+ self.assertIn(sp1_id, entities_ids)
+ self.assertIn(sp2_id, entities_ids)
+
+ # filter the SP by 'id'. Only SP1 should appear.
+ url = self.base_url() + '?id=' + sp1_id
+ resp = self.get(url)
+ sps = resp.result.get('service_providers')
+ entities_ids = [e['id'] for e in sps]
+ self.assertIn(sp1_id, entities_ids)
+ self.assertNotIn(sp2_id, entities_ids)
+
+ def test_filter_list_sp_by_enabled(self):
+ def get_id(resp):
+ sp = resp.result.get('service_provider')
+ return sp.get('id')
+
+ sp1_id = get_id(self._create_default_sp())
+ sp2_ref = self.sp_ref()
+ sp2_ref['enabled'] = False
+ sp2_id = get_id(self._create_default_sp(body=sp2_ref))
+
+ # list the SP, should get two SPs.
+ url = self.base_url()
+ resp = self.get(url)
+ sps = resp.result.get('service_providers')
+ entities_ids = [e['id'] for e in sps]
+ self.assertIn(sp1_id, entities_ids)
+ self.assertIn(sp2_id, entities_ids)
+
+ # filter the SP by 'enabled'. Only SP1 should appear.
+ url = self.base_url() + '?enabled=True'
+ resp = self.get(url)
+ sps = resp.result.get('service_providers')
+ entities_ids = [e['id'] for e in sps]
+ self.assertIn(sp1_id, entities_ids)
+ self.assertNotIn(sp2_id, entities_ids)
+
class WebSSOTests(FederatedTokenTests):
"""A class for testing Web SSO."""
@@ -3306,6 +3540,21 @@ class WebSSOTests(FederatedTokenTests):
resp = self.api.federated_sso_auth(context, self.PROTOCOL)
self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
+ def test_get_sso_origin_host_case_insensitive(self):
+ # test lowercase hostname in trusted_dashboard
+ context = {
+ 'query_string': {
+ 'origin': "http://horizon.com",
+ },
+ }
+ host = self.api._get_sso_origin_host(context)
+ self.assertEqual("http://horizon.com", host)
+ # test uppercase hostname in trusted_dashboard
+ self.config_fixture.config(group='federation',
+ trusted_dashboard=['http://Horizon.com'])
+ host = self.api._get_sso_origin_host(context)
+ self.assertEqual("http://horizon.com", host)
+
def test_federated_sso_auth_with_protocol_specific_remote_id(self):
self.config_fixture.config(
group=self.PROTOCOL,
@@ -3380,7 +3629,7 @@ class WebSSOTests(FederatedTokenTests):
self.assertIn(self.TRUSTED_DASHBOARD, resp.body)
-class K2KServiceCatalogTests(FederationTests):
+class K2KServiceCatalogTests(test_v3.RestfulTestCase):
SP1 = 'SP1'
SP2 = 'SP2'
SP3 = 'SP3'
@@ -3429,11 +3678,10 @@ class K2KServiceCatalogTests(FederationTests):
for entity in service_providers:
id = entity.get('id')
ref_entity = self.sp_response(id, ref.get(id))
- self.assertDictEqual(ref_entity, entity)
+ self.assertDictEqual(entity, ref_entity)
def test_service_providers_in_token(self):
"""Check if service providers are listed in service catalog."""
-
token = self.token_v3_helper.get_token_data(self.user_id, ['password'])
ref = {}
for r in (self.sp_alpha, self.sp_beta, self.sp_gamma):
diff --git a/keystone-moon/keystone/tests/unit/test_v3_filters.py b/keystone-moon/keystone/tests/unit/test_v3_filters.py
index 668a2308..9dc19af5 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_filters.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_filters.py
@@ -13,13 +13,13 @@
# License for the specific language governing permissions and limitations
# under the License.
-import uuid
-
from oslo_config import cfg
from oslo_serialization import jsonutils
from six.moves import range
+from keystone.tests import unit
from keystone.tests.unit import filtering
+from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import temporaryfile
from keystone.tests.unit import test_v3
@@ -31,14 +31,14 @@ class IdentityTestFilteredCase(filtering.FilterTests,
test_v3.RestfulTestCase):
"""Test filter enforcement on the v3 Identity API."""
+ def _policy_fixture(self):
+ return ksfixtures.Policy(self.tmpfilename, self.config_fixture)
+
def setUp(self):
"""Setup for Identity Filter Test Cases."""
-
- super(IdentityTestFilteredCase, self).setUp()
self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
self.tmpfilename = self.tempfile.file_name
- self.config_fixture.config(group='oslo_policy',
- policy_file=self.tmpfilename)
+ super(IdentityTestFilteredCase, self).setUp()
def load_sample_data(self):
"""Create sample data for these tests.
@@ -57,32 +57,23 @@ class IdentityTestFilteredCase(filtering.FilterTests,
"""
# Start by creating a few domains
self._populate_default_domain()
- self.domainA = self.new_domain_ref()
+ self.domainA = unit.new_domain_ref()
self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.domainB = self.new_domain_ref()
+ self.domainB = unit.new_domain_ref()
self.resource_api.create_domain(self.domainB['id'], self.domainB)
- self.domainC = self.new_domain_ref()
+ self.domainC = unit.new_domain_ref()
self.domainC['enabled'] = False
self.resource_api.create_domain(self.domainC['id'], self.domainC)
# Now create some users, one in domainA and two of them in domainB
- self.user1 = self.new_user_ref(domain_id=self.domainA['id'])
- password = uuid.uuid4().hex
- self.user1['password'] = password
- self.user1 = self.identity_api.create_user(self.user1)
- self.user1['password'] = password
-
- self.user2 = self.new_user_ref(domain_id=self.domainB['id'])
- self.user2['password'] = password
- self.user2 = self.identity_api.create_user(self.user2)
- self.user2['password'] = password
-
- self.user3 = self.new_user_ref(domain_id=self.domainB['id'])
- self.user3['password'] = password
- self.user3 = self.identity_api.create_user(self.user3)
- self.user3['password'] = password
-
- self.role = self.new_role_ref()
+ self.user1 = unit.create_user(self.identity_api,
+ domain_id=self.domainA['id'])
+ self.user2 = unit.create_user(self.identity_api,
+ domain_id=self.domainB['id'])
+ self.user3 = unit.create_user(self.identity_api,
+ domain_id=self.domainB['id'])
+
+ self.role = unit.new_role_ref()
self.role_api.create_role(self.role['id'], self.role)
self.assignment_api.create_grant(self.role['id'],
user_id=self.user1['id'],
@@ -311,7 +302,7 @@ class IdentityTestFilteredCase(filtering.FilterTests,
# See if we can add a SQL command...use the group table instead of the
# user table since 'user' is reserved word for SQLAlchemy.
- group = self.new_group_ref(domain_id=self.domainB['id'])
+ group = unit.new_group_ref(domain_id=self.domainB['id'])
group = self.identity_api.create_group(group)
url_by_name = "/users?name=x'; drop table group"
@@ -325,11 +316,11 @@ class IdentityTestFilteredCase(filtering.FilterTests,
class IdentityTestListLimitCase(IdentityTestFilteredCase):
"""Test list limiting enforcement on the v3 Identity API."""
+
content_type = 'json'
def setUp(self):
"""Setup for Identity Limit Test Cases."""
-
super(IdentityTestListLimitCase, self).setUp()
# Create 10 entries for each of the entities we are going to test
@@ -343,7 +334,7 @@ class IdentityTestListLimitCase(IdentityTestFilteredCase):
self.service_list = []
self.addCleanup(self.clean_up_service)
for _ in range(10):
- new_entity = {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex}
+ new_entity = unit.new_service_ref()
service = self.catalog_api.create_service(new_entity['id'],
new_entity)
self.service_list.append(service)
@@ -351,26 +342,22 @@ class IdentityTestListLimitCase(IdentityTestFilteredCase):
self.policy_list = []
self.addCleanup(self.clean_up_policy)
for _ in range(10):
- new_entity = {'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex,
- 'blob': uuid.uuid4().hex}
+ new_entity = unit.new_policy_ref()
policy = self.policy_api.create_policy(new_entity['id'],
new_entity)
self.policy_list.append(policy)
def clean_up_entity(self, entity):
"""Clean up entity test data from Identity Limit Test Cases."""
-
self._delete_test_data(entity, self.entity_lists[entity])
def clean_up_service(self):
"""Clean up service test data from Identity Limit Test Cases."""
-
for service in self.service_list:
self.catalog_api.delete_service(service['id'])
def clean_up_policy(self):
"""Clean up policy test data from Identity Limit Test Cases."""
-
for policy in self.policy_list:
self.policy_api.delete_policy(policy['id'])
@@ -430,7 +417,6 @@ class IdentityTestListLimitCase(IdentityTestFilteredCase):
def test_no_limit(self):
"""Check truncated attribute not set when list not limited."""
-
self._set_policy({"identity:list_services": []})
r = self.get('/services', auth=self.auth)
self.assertEqual(10, len(r.result.get('services')))
@@ -438,7 +424,6 @@ class IdentityTestListLimitCase(IdentityTestFilteredCase):
def test_at_limit(self):
"""Check truncated attribute not set when list at max size."""
-
# Test this by overriding the general limit with a higher
# driver-specific limit (allowing all entities to be returned
# in the collection), which should result in a non truncated list
diff --git a/keystone-moon/keystone/tests/unit/test_v3_identity.py b/keystone-moon/keystone/tests/unit/test_v3_identity.py
index 5a8e4fd5..7d3f6cad 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_identity.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_identity.py
@@ -30,31 +30,63 @@ from keystone.tests.unit import test_v3
CONF = cfg.CONF
+# NOTE(morganfainberg): To be removed when admin_token_auth middleware is
+# removed. This was moved to it's own testcase so it can setup the
+# admin_token_auth pipeline without impacting other tests.
+class IdentityTestCaseStaticAdminToken(test_v3.RestfulTestCase):
+ EXTENSION_TO_ADD = 'admin_token_auth'
+
+ def config_overrides(self):
+ super(IdentityTestCaseStaticAdminToken, self).config_overrides()
+ self.config_fixture.config(
+ admin_token='ADMIN')
+
+ def test_list_users_with_static_admin_token_and_multiple_backends(self):
+ # domain-specific operations with the bootstrap ADMIN token is
+ # disallowed when domain-specific drivers are enabled
+ self.config_fixture.config(group='identity',
+ domain_specific_drivers_enabled=True)
+ self.get('/users', token=CONF.admin_token,
+ expected_status=exception.Unauthorized.code)
+
+ def test_create_user_with_admin_token_and_no_domain(self):
+ """Call ``POST /users`` with admin token but no domain id.
+
+ It should not be possible to use the admin token to create a user
+ while not explicitly passing the domain in the request body.
+
+ """
+ # Passing a valid domain id to new_user_ref() since domain_id is
+ # not an optional parameter.
+ ref = unit.new_user_ref(domain_id=self.domain_id)
+ # Delete the domain id before sending the request.
+ del ref['domain_id']
+ self.post('/users', body={'user': ref}, token=CONF.admin_token,
+ expected_status=http_client.BAD_REQUEST)
+
+
class IdentityTestCase(test_v3.RestfulTestCase):
"""Test users and groups."""
def setUp(self):
super(IdentityTestCase, self).setUp()
- self.group = self.new_group_ref(
- domain_id=self.domain_id)
+ self.group = unit.new_group_ref(domain_id=self.domain_id)
self.group = self.identity_api.create_group(self.group)
self.group_id = self.group['id']
- self.credential_id = uuid.uuid4().hex
- self.credential = self.new_credential_ref(
+ self.credential = unit.new_credential_ref(
user_id=self.user['id'],
project_id=self.project_id)
- self.credential['id'] = self.credential_id
- self.credential_api.create_credential(
- self.credential_id,
- self.credential)
+
+ self.credential_api.create_credential(self.credential['id'],
+ self.credential)
# user crud tests
def test_create_user(self):
"""Call ``POST /users``."""
- ref = self.new_user_ref(domain_id=self.domain_id)
+ ref = unit.new_user_ref(domain_id=self.domain_id)
r = self.post(
'/users',
body={'user': ref})
@@ -70,17 +102,14 @@ class IdentityTestCase(test_v3.RestfulTestCase):
"""
# Create a user with a role on the domain so we can get a
# domain scoped token
- domain = self.new_domain_ref()
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
- user = self.new_user_ref(domain_id=domain['id'])
- password = user['password']
- user = self.identity_api.create_user(user)
- user['password'] = password
+ user = unit.create_user(self.identity_api, domain_id=domain['id'])
self.assignment_api.create_grant(
role_id=self.role_id, user_id=user['id'],
domain_id=domain['id'])
- ref = self.new_user_ref(domain_id=domain['id'])
+ ref = unit.new_user_ref(domain_id=domain['id'])
ref_nd = ref.copy()
ref_nd.pop('domain_id')
auth = self.build_authentication_request(
@@ -91,7 +120,7 @@ class IdentityTestCase(test_v3.RestfulTestCase):
self.assertValidUserResponse(r, ref)
# Now try the same thing without a domain token - which should fail
- ref = self.new_user_ref(domain_id=domain['id'])
+ ref = unit.new_user_ref(domain_id=domain['id'])
ref_nd = ref.copy()
ref_nd.pop('domain_id')
auth = self.build_authentication_request(
@@ -112,6 +141,79 @@ class IdentityTestCase(test_v3.RestfulTestCase):
ref['domain_id'] = CONF.identity.default_domain_id
return self.assertValidUserResponse(r, ref)
+ def test_create_user_with_admin_token_and_domain(self):
+ """Call ``POST /users`` with admin token and domain id."""
+ ref = unit.new_user_ref(domain_id=self.domain_id)
+ self.post('/users', body={'user': ref}, token=self.get_admin_token(),
+ expected_status=http_client.CREATED)
+
+ def test_user_management_normalized_keys(self):
+ """Illustrate the inconsistent handling of hyphens in keys.
+
+ To quote Morgan in bug 1526244:
+
+ the reason this is converted from "domain-id" to "domain_id" is
+ because of how we process/normalize data. The way we have to handle
+ specific data types for known columns requires avoiding "-" in the
+ actual python code since "-" is not valid for attributes in python
+ w/o significant use of "getattr" etc.
+
+ In short, historically we handle some things in conversions. The
+ use of "extras" has long been a poor design choice that leads to
+ odd/strange inconsistent behaviors because of other choices made in
+ handling data from within the body. (In many cases we convert from
+ "-" to "_" throughout openstack)
+
+ Source: https://bugs.launchpad.net/keystone/+bug/1526244/comments/9
+
+ """
+ # Create two domains to work with.
+ domain1 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain1['id'], domain1)
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+
+ # We can successfully create a normal user without any surprises.
+ user = unit.new_user_ref(domain_id=domain1['id'])
+ r = self.post(
+ '/users',
+ body={'user': user})
+ self.assertValidUserResponse(r, user)
+ user['id'] = r.json['user']['id']
+
+ # Query strings are not normalized: so we get all users back (like
+ # self.user), not just the ones in the specified domain.
+ r = self.get(
+ '/users?domain-id=%s' % domain1['id'])
+ self.assertValidUserListResponse(r, ref=self.user)
+ self.assertNotEqual(domain1['id'], self.user['domain_id'])
+
+ # When creating a new user, if we move the 'domain_id' into the
+ # 'domain-id' attribute, the server will normalize the request
+ # attribute, and effectively "move it back" for us.
+ user = unit.new_user_ref(domain_id=domain1['id'])
+ user['domain-id'] = user.pop('domain_id')
+ r = self.post(
+ '/users',
+ body={'user': user})
+ self.assertNotIn('domain-id', r.json['user'])
+ self.assertEqual(domain1['id'], r.json['user']['domain_id'])
+ # (move this attribute back so we can use assertValidUserResponse)
+ user['domain_id'] = user.pop('domain-id')
+ self.assertValidUserResponse(r, user)
+ user['id'] = r.json['user']['id']
+
+ # If we try updating the user's 'domain_id' by specifying a
+ # 'domain-id', then it'll be stored into extras rather than normalized,
+ # and the user's actual 'domain_id' is not affected.
+ r = self.patch(
+ '/users/%s' % user['id'],
+ body={'user': {'domain-id': domain2['id']}})
+ self.assertEqual(domain2['id'], r.json['user']['domain-id'])
+ self.assertEqual(user['domain_id'], r.json['user']['domain_id'])
+ self.assertNotEqual(domain2['id'], user['domain_id'])
+ self.assertValidUserResponse(r, user)
+
def test_create_user_bad_request(self):
"""Call ``POST /users``."""
self.post('/users', body={'user': {}},
@@ -134,29 +236,42 @@ class IdentityTestCase(test_v3.RestfulTestCase):
self.config_fixture.config(group='identity',
domain_specific_drivers_enabled=True)
- # Create a user with a role on the domain so we can get a
- # domain scoped token
- domain = self.new_domain_ref()
+ # Create a new domain with a new project and user
+ domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
- user = self.new_user_ref(domain_id=domain['id'])
- password = user['password']
- user = self.identity_api.create_user(user)
- user['password'] = password
+
+ project = unit.new_project_ref(domain_id=domain['id'])
+ self.resource_api.create_project(project['id'], project)
+
+ user = unit.create_user(self.identity_api, domain_id=domain['id'])
+
+ # Create both project and domain role grants for the user so we
+ # can get both project and domain scoped tokens
self.assignment_api.create_grant(
role_id=self.role_id, user_id=user['id'],
domain_id=domain['id'])
+ self.assignment_api.create_grant(
+ role_id=self.role_id, user_id=user['id'],
+ project_id=project['id'])
- ref = self.new_user_ref(domain_id=domain['id'])
- ref_nd = ref.copy()
- ref_nd.pop('domain_id')
- auth = self.build_authentication_request(
+ dom_auth = self.build_authentication_request(
user_id=user['id'],
password=user['password'],
domain_id=domain['id'])
+ project_auth = self.build_authentication_request(
+ user_id=user['id'],
+ password=user['password'],
+ project_id=project['id'])
# First try using a domain scoped token
resource_url = '/users'
- r = self.get(resource_url, auth=auth)
+ r = self.get(resource_url, auth=dom_auth)
+ self.assertValidUserListResponse(r, ref=user,
+ resource_url=resource_url)
+
+ # Now try using a project scoped token
+ resource_url = '/users'
+ r = self.get(resource_url, auth=project_auth)
self.assertValidUserListResponse(r, ref=user,
resource_url=resource_url)
@@ -167,21 +282,9 @@ class IdentityTestCase(test_v3.RestfulTestCase):
self.assertValidUserListResponse(r, ref=user,
resource_url=resource_url)
- # Now try the same thing without a domain token or filter,
- # which should fail
- r = self.get('/users', expected_status=exception.Unauthorized.code)
-
- def test_list_users_with_static_admin_token_and_multiple_backends(self):
- # domain-specific operations with the bootstrap ADMIN token is
- # disallowed when domain-specific drivers are enabled
- self.config_fixture.config(group='identity',
- domain_specific_drivers_enabled=True)
- self.get('/users', token=CONF.admin_token,
- expected_status=exception.Unauthorized.code)
-
def test_list_users_no_default_project(self):
"""Call ``GET /users`` making sure no default_project_id."""
- user = self.new_user_ref(self.domain_id)
+ user = unit.new_user_ref(self.domain_id)
user = self.identity_api.create_user(user)
resource_url = '/users'
r = self.get(resource_url)
@@ -196,7 +299,7 @@ class IdentityTestCase(test_v3.RestfulTestCase):
def test_get_user_with_default_project(self):
"""Call ``GET /users/{user_id}`` making sure of default_project_id."""
- user = self.new_user_ref(domain_id=self.domain_id,
+ user = unit.new_user_ref(domain_id=self.domain_id,
project_id=self.project_id)
user = self.identity_api.create_user(user)
r = self.get('/users/%(user_id)s' % {'user_id': user['id']})
@@ -209,45 +312,39 @@ class IdentityTestCase(test_v3.RestfulTestCase):
def test_list_groups_for_user(self):
"""Call ``GET /users/{user_id}/groups``."""
+ user1 = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+ user2 = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
- self.user1 = self.new_user_ref(
- domain_id=self.domain['id'])
- password = self.user1['password']
- self.user1 = self.identity_api.create_user(self.user1)
- self.user1['password'] = password
- self.user2 = self.new_user_ref(
- domain_id=self.domain['id'])
- password = self.user2['password']
- self.user2 = self.identity_api.create_user(self.user2)
- self.user2['password'] = password
self.put('/groups/%(group_id)s/users/%(user_id)s' % {
- 'group_id': self.group_id, 'user_id': self.user1['id']})
+ 'group_id': self.group_id, 'user_id': user1['id']})
# Scenarios below are written to test the default policy configuration
# One should be allowed to list one's own groups
auth = self.build_authentication_request(
- user_id=self.user1['id'],
- password=self.user1['password'])
+ user_id=user1['id'],
+ password=user1['password'])
resource_url = ('/users/%(user_id)s/groups' %
- {'user_id': self.user1['id']})
+ {'user_id': user1['id']})
r = self.get(resource_url, auth=auth)
self.assertValidGroupListResponse(r, ref=self.group,
resource_url=resource_url)
# Administrator is allowed to list others' groups
resource_url = ('/users/%(user_id)s/groups' %
- {'user_id': self.user1['id']})
+ {'user_id': user1['id']})
r = self.get(resource_url)
self.assertValidGroupListResponse(r, ref=self.group,
resource_url=resource_url)
# Ordinary users should not be allowed to list other's groups
auth = self.build_authentication_request(
- user_id=self.user2['id'],
- password=self.user2['password'])
+ user_id=user2['id'],
+ password=user2['password'])
r = self.get('/users/%(user_id)s/groups' % {
- 'user_id': self.user1['id']}, auth=auth,
+ 'user_id': user1['id']}, auth=auth,
expected_status=exception.ForbiddenAction.code)
def test_check_user_in_group(self):
@@ -278,7 +375,7 @@ class IdentityTestCase(test_v3.RestfulTestCase):
def test_update_user(self):
"""Call ``PATCH /users/{user_id}``."""
- user = self.new_user_ref(domain_id=self.domain_id)
+ user = unit.new_user_ref(domain_id=self.domain_id)
del user['id']
r = self.patch('/users/%(user_id)s' % {
'user_id': self.user['id']},
@@ -287,44 +384,42 @@ class IdentityTestCase(test_v3.RestfulTestCase):
def test_admin_password_reset(self):
# bootstrap a user as admin
- user_ref = self.new_user_ref(domain_id=self.domain['id'])
- password = user_ref['password']
- user_ref = self.identity_api.create_user(user_ref)
+ user_ref = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
# auth as user should work before a password change
old_password_auth = self.build_authentication_request(
user_id=user_ref['id'],
- password=password)
- r = self.v3_authenticate_token(old_password_auth, expected_status=201)
+ password=user_ref['password'])
+ r = self.v3_create_token(old_password_auth)
old_token = r.headers.get('X-Subject-Token')
# auth as user with a token should work before a password change
old_token_auth = self.build_authentication_request(token=old_token)
- self.v3_authenticate_token(old_token_auth, expected_status=201)
+ self.v3_create_token(old_token_auth)
# administrative password reset
new_password = uuid.uuid4().hex
self.patch('/users/%s' % user_ref['id'],
- body={'user': {'password': new_password}},
- expected_status=200)
+ body={'user': {'password': new_password}})
# auth as user with original password should not work after change
- self.v3_authenticate_token(old_password_auth,
- expected_status=http_client.UNAUTHORIZED)
+ self.v3_create_token(old_password_auth,
+ expected_status=http_client.UNAUTHORIZED)
# auth as user with an old token should not work after change
- self.v3_authenticate_token(old_token_auth,
- expected_status=http_client.NOT_FOUND)
+ self.v3_create_token(old_token_auth,
+ expected_status=http_client.NOT_FOUND)
# new password should work
new_password_auth = self.build_authentication_request(
user_id=user_ref['id'],
password=new_password)
- self.v3_authenticate_token(new_password_auth, expected_status=201)
+ self.v3_create_token(new_password_auth)
def test_update_user_domain_id(self):
"""Call ``PATCH /users/{user_id}`` with domain_id."""
- user = self.new_user_ref(domain_id=self.domain['id'])
+ user = unit.new_user_ref(domain_id=self.domain['id'])
user = self.identity_api.create_user(user)
user['domain_id'] = CONF.identity.default_domain_id
r = self.patch('/users/%(user_id)s' % {
@@ -349,18 +444,16 @@ class IdentityTestCase(test_v3.RestfulTestCase):
"""
# First check the credential for this user is present
r = self.credential_api.get_credential(self.credential['id'])
- self.assertDictEqual(r, self.credential)
+ self.assertDictEqual(self.credential, r)
# Create a second credential with a different user
- self.user2 = self.new_user_ref(
- domain_id=self.domain['id'],
- project_id=self.project['id'])
- self.user2 = self.identity_api.create_user(self.user2)
- self.credential2 = self.new_credential_ref(
- user_id=self.user2['id'],
- project_id=self.project['id'])
- self.credential_api.create_credential(
- self.credential2['id'],
- self.credential2)
+
+ user2 = unit.new_user_ref(domain_id=self.domain['id'],
+ project_id=self.project['id'])
+ user2 = self.identity_api.create_user(user2)
+ credential2 = unit.new_credential_ref(user_id=user2['id'],
+ project_id=self.project['id'])
+ self.credential_api.create_credential(credential2['id'], credential2)
+
# Create a token for this user which we can check later
# gets deleted
auth_data = self.build_authentication_request(
@@ -371,7 +464,7 @@ class IdentityTestCase(test_v3.RestfulTestCase):
# Confirm token is valid for now
self.head('/auth/tokens',
headers={'X-Subject-Token': token},
- expected_status=200)
+ expected_status=http_client.OK)
# Now delete the user
self.delete('/users/%(user_id)s' % {
@@ -387,14 +480,57 @@ class IdentityTestCase(test_v3.RestfulTestCase):
self.user['id'])
self.assertEqual(0, len(tokens))
# But the credential for user2 is unaffected
- r = self.credential_api.get_credential(self.credential2['id'])
- self.assertDictEqual(r, self.credential2)
+ r = self.credential_api.get_credential(credential2['id'])
+ self.assertDictEqual(credential2, r)
+
+ # shadow user tests
+ def test_shadow_federated_user(self):
+ fed_user = unit.new_federated_user_ref()
+ user = (
+ self.identity_api.shadow_federated_user(fed_user["idp_id"],
+ fed_user["protocol_id"],
+ fed_user["unique_id"],
+ fed_user["display_name"])
+ )
+ self.assertIsNotNone(user["id"])
+ self.assertEqual(len(user.keys()), 4)
+ self.assertIsNotNone(user['id'])
+ self.assertIsNotNone(user['name'])
+ self.assertIsNone(user['domain_id'])
+ self.assertEqual(user['enabled'], True)
+
+ def test_shadow_existing_federated_user(self):
+ fed_user = unit.new_federated_user_ref()
+
+ # introduce the user to keystone for the first time
+ shadow_user1 = self.identity_api.shadow_federated_user(
+ fed_user["idp_id"],
+ fed_user["protocol_id"],
+ fed_user["unique_id"],
+ fed_user["display_name"])
+ self.assertEqual(fed_user['display_name'], shadow_user1['name'])
+
+ # shadow the user again, with another name to invalidate the cache
+ # internally, this operation causes request to the driver. It should
+ # not fail.
+ fed_user['display_name'] = uuid.uuid4().hex
+ shadow_user2 = self.identity_api.shadow_federated_user(
+ fed_user["idp_id"],
+ fed_user["protocol_id"],
+ fed_user["unique_id"],
+ fed_user["display_name"])
+ self.assertEqual(fed_user['display_name'], shadow_user2['name'])
+ self.assertNotEqual(shadow_user1['name'], shadow_user2['name'])
+
+ # The shadowed users still share the same unique ID.
+ self.assertEqual(shadow_user1['id'], shadow_user2['id'])
# group crud tests
def test_create_group(self):
"""Call ``POST /groups``."""
- ref = self.new_group_ref(domain_id=self.domain_id)
+ # Create a new group to avoid a duplicate check failure
+ ref = unit.new_group_ref(domain_id=self.domain_id)
r = self.post(
'/groups',
body={'group': ref})
@@ -420,7 +556,7 @@ class IdentityTestCase(test_v3.RestfulTestCase):
def test_update_group(self):
"""Call ``PATCH /groups/{group_id}``."""
- group = self.new_group_ref(domain_id=self.domain_id)
+ group = unit.new_group_ref(domain_id=self.domain_id)
del group['id']
r = self.patch('/groups/%(group_id)s' % {
'group_id': self.group_id},
@@ -429,19 +565,17 @@ class IdentityTestCase(test_v3.RestfulTestCase):
def test_update_group_domain_id(self):
"""Call ``PATCH /groups/{group_id}`` with domain_id."""
- group = self.new_group_ref(domain_id=self.domain['id'])
- group = self.identity_api.create_group(group)
- group['domain_id'] = CONF.identity.default_domain_id
+ self.group['domain_id'] = CONF.identity.default_domain_id
r = self.patch('/groups/%(group_id)s' % {
- 'group_id': group['id']},
- body={'group': group},
+ 'group_id': self.group['id']},
+ body={'group': self.group},
expected_status=exception.ValidationError.code)
self.config_fixture.config(domain_id_immutable=False)
- group['domain_id'] = self.domain['id']
+ self.group['domain_id'] = self.domain['id']
r = self.patch('/groups/%(group_id)s' % {
- 'group_id': group['id']},
- body={'group': group})
- self.assertValidGroupResponse(r, group)
+ 'group_id': self.group['id']},
+ body={'group': self.group})
+ self.assertValidGroupResponse(r, self.group)
def test_delete_group(self):
"""Call ``DELETE /groups/{group_id}``."""
@@ -453,7 +587,7 @@ class IdentityTestCase(test_v3.RestfulTestCase):
log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
- ref = self.new_user_ref(domain_id=self.domain_id)
+ ref = unit.new_user_ref(domain_id=self.domain_id)
self.post(
'/users',
body={'user': ref})
@@ -467,108 +601,122 @@ class IdentityTestCase(test_v3.RestfulTestCase):
log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG))
# bootstrap a user as admin
- user_ref = self.new_user_ref(domain_id=self.domain['id'])
- password = user_ref['password']
- user_ref = self.identity_api.create_user(user_ref)
+ user_ref = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+
+ self.assertNotIn(user_ref['password'], log_fix.output)
# administrative password reset
new_password = uuid.uuid4().hex
self.patch('/users/%s' % user_ref['id'],
- body={'user': {'password': new_password}},
- expected_status=200)
+ body={'user': {'password': new_password}})
- self.assertNotIn(password, log_fix.output)
self.assertNotIn(new_password, log_fix.output)
class IdentityV3toV2MethodsTestCase(unit.TestCase):
"""Test users V3 to V2 conversion methods."""
+ def new_user_ref(self, **kwargs):
+ """Construct a bare bones user ref.
+
+ Omits all optional components.
+ """
+ ref = unit.new_user_ref(**kwargs)
+ # description is already omitted
+ del ref['email']
+ del ref['enabled']
+ del ref['password']
+ return ref
+
def setUp(self):
super(IdentityV3toV2MethodsTestCase, self).setUp()
self.load_backends()
- self.user_id = uuid.uuid4().hex
- self.default_project_id = uuid.uuid4().hex
- self.tenant_id = uuid.uuid4().hex
+ user_id = uuid.uuid4().hex
+ project_id = uuid.uuid4().hex
+
# User with only default_project_id in ref
- self.user1 = {'id': self.user_id,
- 'name': self.user_id,
- 'default_project_id': self.default_project_id,
- 'domain_id': CONF.identity.default_domain_id}
+ self.user1 = self.new_user_ref(
+ id=user_id,
+ name=user_id,
+ project_id=project_id,
+ domain_id=CONF.identity.default_domain_id)
# User without default_project_id or tenantId in ref
- self.user2 = {'id': self.user_id,
- 'name': self.user_id,
- 'domain_id': CONF.identity.default_domain_id}
+ self.user2 = self.new_user_ref(
+ id=user_id,
+ name=user_id,
+ domain_id=CONF.identity.default_domain_id)
# User with both tenantId and default_project_id in ref
- self.user3 = {'id': self.user_id,
- 'name': self.user_id,
- 'default_project_id': self.default_project_id,
- 'tenantId': self.tenant_id,
- 'domain_id': CONF.identity.default_domain_id}
+ self.user3 = self.new_user_ref(
+ id=user_id,
+ name=user_id,
+ project_id=project_id,
+ tenantId=project_id,
+ domain_id=CONF.identity.default_domain_id)
# User with only tenantId in ref
- self.user4 = {'id': self.user_id,
- 'name': self.user_id,
- 'tenantId': self.tenant_id,
- 'domain_id': CONF.identity.default_domain_id}
+ self.user4 = self.new_user_ref(
+ id=user_id,
+ name=user_id,
+ tenantId=project_id,
+ domain_id=CONF.identity.default_domain_id)
# Expected result if the user is meant to have a tenantId element
- self.expected_user = {'id': self.user_id,
- 'name': self.user_id,
- 'username': self.user_id,
- 'tenantId': self.default_project_id}
+ self.expected_user = {'id': user_id,
+ 'name': user_id,
+ 'username': user_id,
+ 'tenantId': project_id}
# Expected result if the user is not meant to have a tenantId element
- self.expected_user_no_tenant_id = {'id': self.user_id,
- 'name': self.user_id,
- 'username': self.user_id}
+ self.expected_user_no_tenant_id = {'id': user_id,
+ 'name': user_id,
+ 'username': user_id}
def test_v3_to_v2_user_method(self):
updated_user1 = controller.V2Controller.v3_to_v2_user(self.user1)
self.assertIs(self.user1, updated_user1)
- self.assertDictEqual(self.user1, self.expected_user)
+ self.assertDictEqual(self.expected_user, self.user1)
updated_user2 = controller.V2Controller.v3_to_v2_user(self.user2)
self.assertIs(self.user2, updated_user2)
- self.assertDictEqual(self.user2, self.expected_user_no_tenant_id)
+ self.assertDictEqual(self.expected_user_no_tenant_id, self.user2)
updated_user3 = controller.V2Controller.v3_to_v2_user(self.user3)
self.assertIs(self.user3, updated_user3)
- self.assertDictEqual(self.user3, self.expected_user)
+ self.assertDictEqual(self.expected_user, self.user3)
updated_user4 = controller.V2Controller.v3_to_v2_user(self.user4)
self.assertIs(self.user4, updated_user4)
- self.assertDictEqual(self.user4, self.expected_user_no_tenant_id)
+ self.assertDictEqual(self.expected_user_no_tenant_id, self.user4)
def test_v3_to_v2_user_method_list(self):
user_list = [self.user1, self.user2, self.user3, self.user4]
updated_list = controller.V2Controller.v3_to_v2_user(user_list)
- self.assertEqual(len(updated_list), len(user_list))
+ self.assertEqual(len(user_list), len(updated_list))
for i, ref in enumerate(updated_list):
# Order should not change.
self.assertIs(ref, user_list[i])
- self.assertDictEqual(self.user1, self.expected_user)
- self.assertDictEqual(self.user2, self.expected_user_no_tenant_id)
- self.assertDictEqual(self.user3, self.expected_user)
- self.assertDictEqual(self.user4, self.expected_user_no_tenant_id)
+ self.assertDictEqual(self.expected_user, self.user1)
+ self.assertDictEqual(self.expected_user_no_tenant_id, self.user2)
+ self.assertDictEqual(self.expected_user, self.user3)
+ self.assertDictEqual(self.expected_user_no_tenant_id, self.user4)
class UserSelfServiceChangingPasswordsTestCase(test_v3.RestfulTestCase):
def setUp(self):
super(UserSelfServiceChangingPasswordsTestCase, self).setUp()
- self.user_ref = self.new_user_ref(domain_id=self.domain['id'])
- password = self.user_ref['password']
- self.user_ref = self.identity_api.create_user(self.user_ref)
- self.user_ref['password'] = password
- self.token = self.get_request_token(self.user_ref['password'], 201)
+ self.user_ref = unit.create_user(self.identity_api,
+ domain_id=self.domain['id'])
+ self.token = self.get_request_token(self.user_ref['password'],
+ http_client.CREATED)
def get_request_token(self, password, expected_status):
auth_data = self.build_authentication_request(
user_id=self.user_ref['id'],
password=password)
- r = self.v3_authenticate_token(auth_data,
- expected_status=expected_status)
+ r = self.v3_create_token(auth_data,
+ expected_status=expected_status)
return r.headers.get('X-Subject-Token')
def change_password(self, expected_status, **kwargs):
@@ -581,27 +729,28 @@ class UserSelfServiceChangingPasswordsTestCase(test_v3.RestfulTestCase):
def test_changing_password(self):
# original password works
token_id = self.get_request_token(self.user_ref['password'],
- expected_status=201)
+ expected_status=http_client.CREATED)
# original token works
old_token_auth = self.build_authentication_request(token=token_id)
- self.v3_authenticate_token(old_token_auth, expected_status=201)
+ self.v3_create_token(old_token_auth)
# change password
new_password = uuid.uuid4().hex
self.change_password(password=new_password,
original_password=self.user_ref['password'],
- expected_status=204)
+ expected_status=http_client.NO_CONTENT)
# old password fails
self.get_request_token(self.user_ref['password'],
expected_status=http_client.UNAUTHORIZED)
# old token fails
- self.v3_authenticate_token(old_token_auth,
- expected_status=http_client.NOT_FOUND)
+ self.v3_create_token(old_token_auth,
+ expected_status=http_client.NOT_FOUND)
# new password works
- self.get_request_token(new_password, expected_status=201)
+ self.get_request_token(new_password,
+ expected_status=http_client.CREATED)
def test_changing_password_with_missing_original_password_fails(self):
r = self.change_password(password=uuid.uuid4().hex,
@@ -640,7 +789,7 @@ class UserSelfServiceChangingPasswordsTestCase(test_v3.RestfulTestCase):
new_password = uuid.uuid4().hex
self.change_password(password=new_password,
original_password=self.user_ref['password'],
- expected_status=204)
+ expected_status=http_client.NO_CONTENT)
self.assertNotIn(self.user_ref['password'], log_fix.output)
self.assertNotIn(new_password, log_fix.output)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_oauth1.py b/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
index 8794a426..198dffb8 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_oauth1.py
@@ -15,28 +15,36 @@
import copy
import uuid
-from oslo_config import cfg
+import mock
+from oslo_log import versionutils
from oslo_serialization import jsonutils
from pycadf import cadftaxonomy
from six.moves import http_client
from six.moves import urllib
-from keystone.contrib import oauth1
-from keystone.contrib.oauth1 import controllers
-from keystone.contrib.oauth1 import core
+from keystone.contrib.oauth1 import routers
from keystone import exception
+from keystone import oauth1
+from keystone.oauth1 import controllers
+from keystone.oauth1 import core
+from keystone.tests import unit
from keystone.tests.unit.common import test_notifications
+from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import temporaryfile
from keystone.tests.unit import test_v3
-CONF = cfg.CONF
+class OAuth1ContribTests(test_v3.RestfulTestCase):
+ @mock.patch.object(versionutils, 'report_deprecated_feature')
+ def test_exception_happens(self, mock_deprecator):
+ routers.OAuth1Extension(mock.ANY)
+ mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY)
+ args, _kwargs = mock_deprecator.call_args
+ self.assertIn("Remove oauth1_extension from", args[1])
-class OAuth1Tests(test_v3.RestfulTestCase):
- EXTENSION_NAME = 'oauth1'
- EXTENSION_TO_ADD = 'oauth1_extension'
+class OAuth1Tests(test_v3.RestfulTestCase):
CONSUMER_URL = '/OS-OAUTH1/consumers'
@@ -140,7 +148,7 @@ class ConsumerCRUDTests(OAuth1Tests):
consumer = self._create_single_consumer()
consumer_id = consumer['id']
resp = self.delete(self.CONSUMER_URL + '/%s' % consumer_id)
- self.assertResponseStatus(resp, 204)
+ self.assertResponseStatus(resp, http_client.NO_CONTENT)
def test_consumer_get(self):
consumer = self._create_single_consumer()
@@ -262,7 +270,7 @@ class OAuthFlowTests(OAuth1Tests):
url = self._authorize_request_token(request_key)
body = {'roles': [{'id': self.role_id}]}
- resp = self.put(url, body=body, expected_status=200)
+ resp = self.put(url, body=body, expected_status=http_client.OK)
self.verifier = resp.result['token']['oauth_verifier']
self.assertTrue(all(i in core.VERIFIER_CHARS for i in self.verifier))
self.assertEqual(8, len(self.verifier))
@@ -357,7 +365,7 @@ class AccessTokenCRUDTests(OAuthFlowTests):
resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
% {'user': self.user_id,
'auth': self.access_token.key})
- self.assertResponseStatus(resp, 204)
+ self.assertResponseStatus(resp, http_client.NO_CONTENT)
# List access_token should be 0
resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
@@ -388,7 +396,7 @@ class AuthTokenTests(OAuthFlowTests):
self.assertEqual(self.role_id, roles_list[0]['id'])
# verify that the token can perform delegated tasks
- ref = self.new_user_ref(domain_id=self.domain_id)
+ ref = unit.new_user_ref(domain_id=self.domain_id)
r = self.admin_request(path='/v3/users', headers=headers,
method='POST', body={'user': ref})
self.assertValidUserResponse(r, ref)
@@ -400,7 +408,7 @@ class AuthTokenTests(OAuthFlowTests):
resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
% {'user': self.user_id,
'auth': self.access_token.key})
- self.assertResponseStatus(resp, 204)
+ self.assertResponseStatus(resp, http_client.NO_CONTENT)
# Check Keystone Token no longer exists
headers = {'X-Subject-Token': self.keystone_token_id,
@@ -415,7 +423,7 @@ class AuthTokenTests(OAuthFlowTests):
consumer_id = self.consumer['key']
resp = self.delete('/OS-OAUTH1/consumers/%(consumer_id)s'
% {'consumer_id': consumer_id})
- self.assertResponseStatus(resp, 204)
+ self.assertResponseStatus(resp, http_client.NO_CONTENT)
# List access_token should be 0
resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
@@ -491,7 +499,7 @@ class AuthTokenTests(OAuthFlowTests):
self.keystone_token_id)
def _create_trust_get_token(self):
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
trustee_user_id=self.user_id,
project_id=self.project_id,
@@ -534,7 +542,7 @@ class AuthTokenTests(OAuthFlowTests):
def test_oauth_token_cannot_create_new_trust(self):
self.test_oauth_flow()
- ref = self.new_trust_ref(
+ ref = unit.new_trust_ref(
trustor_user_id=self.user_id,
trustee_user_id=self.user_id,
project_id=self.project_id,
@@ -588,6 +596,18 @@ class AuthTokenTests(OAuthFlowTests):
expected_status=http_client.FORBIDDEN)
+class FernetAuthTokenTests(AuthTokenTests):
+
+ def config_overrides(self):
+ super(FernetAuthTokenTests, self).config_overrides()
+ self.config_fixture.config(group='token', provider='fernet')
+ self.useFixture(ksfixtures.KeyRepository(self.config_fixture))
+
+ def test_delete_keystone_tokens_by_consumer_id(self):
+ # NOTE(lbragstad): Fernet tokens are never persisted in the backend.
+ pass
+
+
class MaliciousOAuth1Tests(OAuth1Tests):
def test_bad_consumer_secret(self):
@@ -645,7 +665,7 @@ class MaliciousOAuth1Tests(OAuth1Tests):
url = self._authorize_request_token(request_key)
body = {'roles': [{'id': self.role_id}]}
- resp = self.put(url, body=body, expected_status=200)
+ resp = self.put(url, body=body, expected_status=http_client.OK)
verifier = resp.result['token']['oauth_verifier']
self.assertIsNotNone(verifier)
@@ -719,7 +739,7 @@ class MaliciousOAuth1Tests(OAuth1Tests):
url = self._authorize_request_token(request_key)
body = {'roles': [{'id': self.role_id}]}
- resp = self.put(url, body=body, expected_status=200)
+ resp = self.put(url, body=body, expected_status=http_client.OK)
self.verifier = resp.result['token']['oauth_verifier']
self.request_token.set_verifier(self.verifier)
@@ -753,7 +773,8 @@ class MaliciousOAuth1Tests(OAuth1Tests):
# NOTE(stevemar): To simulate this error, we remove the Authorization
# header from the post request.
del headers['Authorization']
- self.post(endpoint, headers=headers, expected_status=500)
+ self.post(endpoint, headers=headers,
+ expected_status=http_client.INTERNAL_SERVER_ERROR)
class OAuthNotificationTests(OAuth1Tests,
@@ -800,7 +821,6 @@ class OAuthNotificationTests(OAuth1Tests,
notifications for request token creation, and access token
creation/deletion are emitted.
"""
-
consumer = self._create_single_consumer()
consumer_id = consumer['id']
consumer_secret = consumer['secret']
@@ -829,7 +849,7 @@ class OAuthNotificationTests(OAuth1Tests,
url = self._authorize_request_token(request_key)
body = {'roles': [{'id': self.role_id}]}
- resp = self.put(url, body=body, expected_status=200)
+ resp = self.put(url, body=body, expected_status=http_client.OK)
self.verifier = resp.result['token']['oauth_verifier']
self.assertTrue(all(i in core.VERIFIER_CHARS for i in self.verifier))
self.assertEqual(8, len(self.verifier))
@@ -858,7 +878,7 @@ class OAuthNotificationTests(OAuth1Tests,
resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
% {'user': self.user_id,
'auth': self.access_token.key})
- self.assertResponseStatus(resp, 204)
+ self.assertResponseStatus(resp, http_client.NO_CONTENT)
# Test to ensure the delete access token notification is sent
self._assert_notify_sent(access_key,
@@ -873,7 +893,7 @@ class OAuthNotificationTests(OAuth1Tests,
class OAuthCADFNotificationTests(OAuthNotificationTests):
def setUp(self):
- """Repeat the tests for CADF notifications """
+ """Repeat the tests for CADF notifications."""
super(OAuthCADFNotificationTests, self).setUp()
self.config_fixture.config(notification_format='cadf')
diff --git a/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py b/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
index 86ced724..5fb5387a 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py
@@ -19,7 +19,7 @@ from six.moves import http_client
from testtools import matchers
from keystone.common import utils
-from keystone.contrib.revoke import model
+from keystone.models import revoke_model
from keystone.tests.unit import test_v3
from keystone.token import provider
@@ -31,8 +31,6 @@ def _future_time_string():
class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
- EXTENSION_NAME = 'revoke'
- EXTENSION_TO_ADD = 'revoke_extension'
JSON_HOME_DATA = {
'http://docs.openstack.org/api/openstack-identity/3/ext/OS-REVOKE/1.0/'
@@ -92,7 +90,7 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
sample['project_id'] = six.text_type(project_id)
before_time = timeutils.utcnow()
self.revoke_api.revoke(
- model.RevokeEvent(project_id=project_id))
+ revoke_model.RevokeEvent(project_id=project_id))
resp = self.get('/OS-REVOKE/events')
events = resp.json_body['events']
@@ -105,7 +103,7 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
sample['domain_id'] = six.text_type(domain_id)
before_time = timeutils.utcnow()
self.revoke_api.revoke(
- model.RevokeEvent(domain_id=domain_id))
+ revoke_model.RevokeEvent(domain_id=domain_id))
resp = self.get('/OS-REVOKE/events')
events = resp.json_body['events']
@@ -127,7 +125,7 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin):
sample['domain_id'] = six.text_type(domain_id)
self.revoke_api.revoke(
- model.RevokeEvent(domain_id=domain_id))
+ revoke_model.RevokeEvent(domain_id=domain_id))
resp = self.get('/OS-REVOKE/events')
events = resp.json_body['events']
diff --git a/keystone-moon/keystone/tests/unit/test_v3_policy.py b/keystone-moon/keystone/tests/unit/test_v3_policy.py
index 538fc565..76a52088 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_policy.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_policy.py
@@ -12,8 +12,10 @@
# License for the specific language governing permissions and limitations
# under the License.
+import json
import uuid
+from keystone.tests import unit
from keystone.tests.unit import test_v3
@@ -22,9 +24,8 @@ class PolicyTestCase(test_v3.RestfulTestCase):
def setUp(self):
super(PolicyTestCase, self).setUp()
- self.policy_id = uuid.uuid4().hex
- self.policy = self.new_policy_ref()
- self.policy['id'] = self.policy_id
+ self.policy = unit.new_policy_ref()
+ self.policy_id = self.policy['id']
self.policy_api.create_policy(
self.policy_id,
self.policy.copy())
@@ -33,10 +34,8 @@ class PolicyTestCase(test_v3.RestfulTestCase):
def test_create_policy(self):
"""Call ``POST /policies``."""
- ref = self.new_policy_ref()
- r = self.post(
- '/policies',
- body={'policy': ref})
+ ref = unit.new_policy_ref()
+ r = self.post('/policies', body={'policy': ref})
return self.assertValidPolicyResponse(r, ref)
def test_list_policies(self):
@@ -47,22 +46,18 @@ class PolicyTestCase(test_v3.RestfulTestCase):
def test_get_policy(self):
"""Call ``GET /policies/{policy_id}``."""
r = self.get(
- '/policies/%(policy_id)s' % {
- 'policy_id': self.policy_id})
+ '/policies/%(policy_id)s' % {'policy_id': self.policy_id})
self.assertValidPolicyResponse(r, self.policy)
def test_update_policy(self):
"""Call ``PATCH /policies/{policy_id}``."""
- policy = self.new_policy_ref()
- policy['id'] = self.policy_id
+ self.policy['blob'] = json.dumps({'data': uuid.uuid4().hex, })
r = self.patch(
- '/policies/%(policy_id)s' % {
- 'policy_id': self.policy_id},
- body={'policy': policy})
- self.assertValidPolicyResponse(r, policy)
+ '/policies/%(policy_id)s' % {'policy_id': self.policy_id},
+ body={'policy': self.policy})
+ self.assertValidPolicyResponse(r, self.policy)
def test_delete_policy(self):
"""Call ``DELETE /policies/{policy_id}``."""
self.delete(
- '/policies/%(policy_id)s' % {
- 'policy_id': self.policy_id})
+ '/policies/%(policy_id)s' % {'policy_id': self.policy_id})
diff --git a/keystone-moon/keystone/tests/unit/test_v3_protection.py b/keystone-moon/keystone/tests/unit/test_v3_protection.py
index 9922ae5e..f77a1528 100644
--- a/keystone-moon/keystone/tests/unit/test_v3_protection.py
+++ b/keystone-moon/keystone/tests/unit/test_v3_protection.py
@@ -20,19 +20,22 @@ from oslo_serialization import jsonutils
from six.moves import http_client
from keystone import exception
-from keystone.policy.backends import rules
from keystone.tests import unit
+from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import temporaryfile
from keystone.tests.unit import test_v3
+from keystone.tests.unit import utils
CONF = cfg.CONF
-DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
class IdentityTestProtectedCase(test_v3.RestfulTestCase):
"""Test policy enforcement on the v3 Identity API."""
+ def _policy_fixture(self):
+ return ksfixtures.Policy(self.tmpfilename, self.config_fixture)
+
def setUp(self):
"""Setup for Identity Protection Test Cases.
@@ -49,14 +52,9 @@ class IdentityTestProtectedCase(test_v3.RestfulTestCase):
the default domain.
"""
- # Ensure that test_v3.RestfulTestCase doesn't load its own
- # sample data, which would make checking the results of our
- # tests harder
- super(IdentityTestProtectedCase, self).setUp()
self.tempfile = self.useFixture(temporaryfile.SecureTempFile())
self.tmpfilename = self.tempfile.file_name
- self.config_fixture.config(group='oslo_policy',
- policy_file=self.tmpfilename)
+ super(IdentityTestProtectedCase, self).setUp()
# A default auth request we can use - un-scoped user token
self.auth = self.build_authentication_request(
@@ -66,45 +64,33 @@ class IdentityTestProtectedCase(test_v3.RestfulTestCase):
def load_sample_data(self):
self._populate_default_domain()
# Start by creating a couple of domains
- self.domainA = self.new_domain_ref()
+ self.domainA = unit.new_domain_ref()
self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.domainB = self.new_domain_ref()
+ self.domainB = unit.new_domain_ref()
self.resource_api.create_domain(self.domainB['id'], self.domainB)
- self.domainC = self.new_domain_ref()
- self.domainC['enabled'] = False
+ self.domainC = unit.new_domain_ref(enabled=False)
self.resource_api.create_domain(self.domainC['id'], self.domainC)
# Now create some users, one in domainA and two of them in domainB
- self.user1 = self.new_user_ref(domain_id=self.domainA['id'])
- password = uuid.uuid4().hex
- self.user1['password'] = password
- self.user1 = self.identity_api.create_user(self.user1)
- self.user1['password'] = password
-
- self.user2 = self.new_user_ref(domain_id=self.domainB['id'])
- password = uuid.uuid4().hex
- self.user2['password'] = password
- self.user2 = self.identity_api.create_user(self.user2)
- self.user2['password'] = password
-
- self.user3 = self.new_user_ref(domain_id=self.domainB['id'])
- password = uuid.uuid4().hex
- self.user3['password'] = password
- self.user3 = self.identity_api.create_user(self.user3)
- self.user3['password'] = password
-
- self.group1 = self.new_group_ref(domain_id=self.domainA['id'])
+ self.user1 = unit.create_user(self.identity_api,
+ domain_id=self.domainA['id'])
+ self.user2 = unit.create_user(self.identity_api,
+ domain_id=self.domainB['id'])
+ self.user3 = unit.create_user(self.identity_api,
+ domain_id=self.domainB['id'])
+
+ self.group1 = unit.new_group_ref(domain_id=self.domainA['id'])
self.group1 = self.identity_api.create_group(self.group1)
- self.group2 = self.new_group_ref(domain_id=self.domainA['id'])
+ self.group2 = unit.new_group_ref(domain_id=self.domainA['id'])
self.group2 = self.identity_api.create_group(self.group2)
- self.group3 = self.new_group_ref(domain_id=self.domainB['id'])
+ self.group3 = unit.new_group_ref(domain_id=self.domainB['id'])
self.group3 = self.identity_api.create_group(self.group3)
- self.role = self.new_role_ref()
+ self.role = unit.new_role_ref()
self.role_api.create_role(self.role['id'], self.role)
- self.role1 = self.new_role_ref()
+ self.role1 = unit.new_role_ref()
self.role_api.create_role(self.role1['id'], self.role1)
self.assignment_api.create_grant(self.role['id'],
user_id=self.user1['id'],
@@ -348,34 +334,23 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
def load_sample_data(self):
self._populate_default_domain()
- self.just_a_user = self.new_user_ref(
+ self.just_a_user = unit.create_user(
+ self.identity_api,
domain_id=CONF.identity.default_domain_id)
- password = uuid.uuid4().hex
- self.just_a_user['password'] = password
- self.just_a_user = self.identity_api.create_user(self.just_a_user)
- self.just_a_user['password'] = password
-
- self.another_user = self.new_user_ref(
+ self.another_user = unit.create_user(
+ self.identity_api,
domain_id=CONF.identity.default_domain_id)
- password = uuid.uuid4().hex
- self.another_user['password'] = password
- self.another_user = self.identity_api.create_user(self.another_user)
- self.another_user['password'] = password
-
- self.admin_user = self.new_user_ref(
+ self.admin_user = unit.create_user(
+ self.identity_api,
domain_id=CONF.identity.default_domain_id)
- password = uuid.uuid4().hex
- self.admin_user['password'] = password
- self.admin_user = self.identity_api.create_user(self.admin_user)
- self.admin_user['password'] = password
- self.role = self.new_role_ref()
+ self.role = unit.new_role_ref()
self.role_api.create_role(self.role['id'], self.role)
- self.admin_role = {'id': uuid.uuid4().hex, 'name': 'admin'}
+ self.admin_role = unit.new_role_ref(name='admin')
self.role_api.create_role(self.admin_role['id'], self.admin_role)
# Create and assign roles to the project
- self.project = self.new_project_ref(
+ self.project = unit.new_project_ref(
domain_id=CONF.identity.default_domain_id)
self.resource_api.create_project(self.project['id'], self.project)
self.assignment_api.create_grant(self.role['id'],
@@ -461,7 +436,8 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
token = self.get_requested_token(auth)
self.head('/auth/tokens', token=token,
- headers={'X-Subject-Token': token}, expected_status=200)
+ headers={'X-Subject-Token': token},
+ expected_status=http_client.OK)
def test_user_check_user_token(self):
# A user can check one of their own tokens.
@@ -474,7 +450,8 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
token2 = self.get_requested_token(auth)
self.head('/auth/tokens', token=token1,
- headers={'X-Subject-Token': token2}, expected_status=200)
+ headers={'X-Subject-Token': token2},
+ expected_status=http_client.OK)
def test_user_check_other_user_token_rejected(self):
# A user cannot check another user's token.
@@ -510,7 +487,8 @@ class IdentityTestPolicySample(test_v3.RestfulTestCase):
user_token = self.get_requested_token(user_auth)
self.head('/auth/tokens', token=admin_token,
- headers={'X-Subject-Token': user_token}, expected_status=200)
+ headers={'X-Subject-Token': user_token},
+ expected_status=http_client.OK)
def test_user_revoke_same_token(self):
# Given a non-admin user token, the token can be used to revoke
@@ -579,6 +557,10 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin):
"""Test policy enforcement of the sample v3 cloud policy file."""
+ def _policy_fixture(self):
+ return ksfixtures.Policy(unit.dirs.etc('policy.v3cloudsample.json'),
+ self.config_fixture)
+
def setUp(self):
"""Setup for v3 Cloud Policy Sample Test Cases.
@@ -592,8 +574,8 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
- domain_admin_user has role 'admin' on domainA,
- project_admin_user has role 'admin' on the project,
- just_a_user has a non-admin role on both domainA and the project.
- - admin_domain has user cloud_admin_user, with an 'admin' role
- on admin_domain.
+ - admin_domain has admin_project, and user cloud_admin_user, with an
+ 'admin' role on admin_project.
We test various api protection rules from the cloud sample policy
file to make sure the sample is valid and that we correctly enforce it.
@@ -604,62 +586,61 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
# tests harder
super(IdentityTestv3CloudPolicySample, self).setUp()
- # Finally, switch to the v3 sample policy file
- self.addCleanup(rules.reset)
- rules.reset()
self.config_fixture.config(
- group='oslo_policy',
- policy_file=unit.dirs.etc('policy.v3cloudsample.json'))
+ group='resource',
+ admin_project_name=self.admin_project['name'])
+ self.config_fixture.config(
+ group='resource',
+ admin_project_domain_name=self.admin_domain['name'])
def load_sample_data(self):
# Start by creating a couple of domains
self._populate_default_domain()
- self.domainA = self.new_domain_ref()
+ self.domainA = unit.new_domain_ref()
self.resource_api.create_domain(self.domainA['id'], self.domainA)
- self.domainB = self.new_domain_ref()
+ self.domainB = unit.new_domain_ref()
self.resource_api.create_domain(self.domainB['id'], self.domainB)
- self.admin_domain = {'id': 'admin_domain_id', 'name': 'Admin_domain'}
+ self.admin_domain = unit.new_domain_ref()
self.resource_api.create_domain(self.admin_domain['id'],
self.admin_domain)
+ self.admin_project = unit.new_project_ref(
+ domain_id=self.admin_domain['id'])
+ self.resource_api.create_project(self.admin_project['id'],
+ self.admin_project)
+
# And our users
- self.cloud_admin_user = self.new_user_ref(
+ self.cloud_admin_user = unit.create_user(
+ self.identity_api,
domain_id=self.admin_domain['id'])
- password = uuid.uuid4().hex
- self.cloud_admin_user['password'] = password
- self.cloud_admin_user = (
- self.identity_api.create_user(self.cloud_admin_user))
- self.cloud_admin_user['password'] = password
- self.just_a_user = self.new_user_ref(domain_id=self.domainA['id'])
- password = uuid.uuid4().hex
- self.just_a_user['password'] = password
- self.just_a_user = self.identity_api.create_user(self.just_a_user)
- self.just_a_user['password'] = password
- self.domain_admin_user = self.new_user_ref(
+ self.just_a_user = unit.create_user(
+ self.identity_api,
domain_id=self.domainA['id'])
- password = uuid.uuid4().hex
- self.domain_admin_user['password'] = password
- self.domain_admin_user = (
- self.identity_api.create_user(self.domain_admin_user))
- self.domain_admin_user['password'] = password
- self.project_admin_user = self.new_user_ref(
+ self.domain_admin_user = unit.create_user(
+ self.identity_api,
+ domain_id=self.domainA['id'])
+ self.domainB_admin_user = unit.create_user(
+ self.identity_api,
+ domain_id=self.domainB['id'])
+ self.project_admin_user = unit.create_user(
+ self.identity_api,
domain_id=self.domainA['id'])
- password = uuid.uuid4().hex
- self.project_admin_user['password'] = password
- self.project_admin_user = (
- self.identity_api.create_user(self.project_admin_user))
- self.project_admin_user['password'] = password
-
- # The admin role and another plain role
- self.admin_role = {'id': uuid.uuid4().hex, 'name': 'admin'}
+ self.project_adminB_user = unit.create_user(
+ self.identity_api,
+ domain_id=self.domainB['id'])
+
+ # The admin role, a domain specific role and another plain role
+ self.admin_role = unit.new_role_ref(name='admin')
self.role_api.create_role(self.admin_role['id'], self.admin_role)
- self.role = self.new_role_ref()
+ self.roleA = unit.new_role_ref(domain_id=self.domainA['id'])
+ self.role_api.create_role(self.roleA['id'], self.roleA)
+ self.role = unit.new_role_ref()
self.role_api.create_role(self.role['id'], self.role)
- # The cloud admin just gets the admin role
+ # The cloud admin just gets the admin role on the special admin project
self.assignment_api.create_grant(self.admin_role['id'],
user_id=self.cloud_admin_user['id'],
- domain_id=self.admin_domain['id'])
+ project_id=self.admin_project['id'])
# Assign roles to the domain
self.assignment_api.create_grant(self.admin_role['id'],
@@ -668,13 +649,21 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.assignment_api.create_grant(self.role['id'],
user_id=self.just_a_user['id'],
domain_id=self.domainA['id'])
+ self.assignment_api.create_grant(self.admin_role['id'],
+ user_id=self.domainB_admin_user['id'],
+ domain_id=self.domainB['id'])
# Create and assign roles to the project
- self.project = self.new_project_ref(domain_id=self.domainA['id'])
+ self.project = unit.new_project_ref(domain_id=self.domainA['id'])
self.resource_api.create_project(self.project['id'], self.project)
+ self.projectB = unit.new_project_ref(domain_id=self.domainB['id'])
+ self.resource_api.create_project(self.projectB['id'], self.projectB)
self.assignment_api.create_grant(self.admin_role['id'],
user_id=self.project_admin_user['id'],
project_id=self.project['id'])
+ self.assignment_api.create_grant(
+ self.admin_role['id'], user_id=self.project_adminB_user['id'],
+ project_id=self.projectB['id'])
self.assignment_api.create_grant(self.role['id'],
user_id=self.just_a_user['id'],
project_id=self.project['id'])
@@ -683,7 +672,8 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
# Return the expected return codes for APIs with and without data
# with any specified status overriding the normal values
if expected_status is None:
- return (200, 201, 204)
+ return (http_client.OK, http_client.CREATED,
+ http_client.NO_CONTENT)
else:
return (expected_status, expected_status, expected_status)
@@ -702,7 +692,7 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.delete(entity_url, auth=self.auth,
expected_status=status_no_data)
- user_ref = self.new_user_ref(domain_id=domain_id)
+ user_ref = unit.new_user_ref(domain_id=domain_id)
self.post('/users', auth=self.auth, body={'user': user_ref},
expected_status=status_created)
@@ -721,7 +711,7 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.delete(entity_url, auth=self.auth,
expected_status=status_no_data)
- proj_ref = self.new_project_ref(domain_id=domain_id)
+ proj_ref = unit.new_project_ref(domain_id=domain_id)
self.post('/projects', auth=self.auth, body={'project': proj_ref},
expected_status=status_created)
@@ -740,13 +730,14 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.delete(entity_url, auth=self.auth,
expected_status=status_no_data)
- domain_ref = self.new_domain_ref()
+ domain_ref = unit.new_domain_ref()
self.post('/domains', auth=self.auth, body={'domain': domain_ref},
expected_status=status_created)
- def _test_grants(self, target, entity_id, expected=None):
+ def _test_grants(self, target, entity_id, role_domain_id=None,
+ list_status_OK=False, expected=None):
status_OK, status_created, status_no_data = self._stati(expected)
- a_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
+ a_role = unit.new_role_ref(domain_id=role_domain_id)
self.role_api.create_role(a_role['id'], a_role)
collection_url = (
@@ -762,11 +753,67 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
expected_status=status_no_data)
self.head(member_url, auth=self.auth,
expected_status=status_no_data)
- self.get(collection_url, auth=self.auth,
- expected_status=status_OK)
+ if list_status_OK:
+ self.get(collection_url, auth=self.auth)
+ else:
+ self.get(collection_url, auth=self.auth,
+ expected_status=status_OK)
self.delete(member_url, auth=self.auth,
expected_status=status_no_data)
+ def _role_management_cases(self, read_status_OK=False, expected=None):
+ # Set the different status values for different types of call depending
+ # on whether we expect the calls to fail or not.
+ status_OK, status_created, status_no_data = self._stati(expected)
+ entity_url = '/roles/%s' % self.role['id']
+ list_url = '/roles'
+
+ if read_status_OK:
+ self.get(entity_url, auth=self.auth)
+ self.get(list_url, auth=self.auth)
+ else:
+ self.get(entity_url, auth=self.auth,
+ expected_status=status_OK)
+ self.get(list_url, auth=self.auth,
+ expected_status=status_OK)
+
+ role = {'name': 'Updated'}
+ self.patch(entity_url, auth=self.auth, body={'role': role},
+ expected_status=status_OK)
+ self.delete(entity_url, auth=self.auth,
+ expected_status=status_no_data)
+
+ role_ref = unit.new_role_ref()
+ self.post('/roles', auth=self.auth, body={'role': role_ref},
+ expected_status=status_created)
+
+ def _domain_role_management_cases(self, domain_id, read_status_OK=False,
+ expected=None):
+ # Set the different status values for different types of call depending
+ # on whether we expect the calls to fail or not.
+ status_OK, status_created, status_no_data = self._stati(expected)
+ entity_url = '/roles/%s' % self.roleA['id']
+ list_url = '/roles?domain_id=%s' % domain_id
+
+ if read_status_OK:
+ self.get(entity_url, auth=self.auth)
+ self.get(list_url, auth=self.auth)
+ else:
+ self.get(entity_url, auth=self.auth,
+ expected_status=status_OK)
+ self.get(list_url, auth=self.auth,
+ expected_status=status_OK)
+
+ role = {'name': 'Updated'}
+ self.patch(entity_url, auth=self.auth, body={'role': role},
+ expected_status=status_OK)
+ self.delete(entity_url, auth=self.auth,
+ expected_status=status_no_data)
+
+ role_ref = unit.new_role_ref(domain_id=domain_id)
+ self.post('/roles', auth=self.auth, body={'role': role_ref},
+ expected_status=status_created)
+
def test_user_management(self):
# First, authenticate with a user that does not have the domain
# admin role - shouldn't be able to do much.
@@ -786,13 +833,90 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self._test_user_management(self.domainA['id'])
+ def test_user_management_normalized_keys(self):
+ """Illustrate the inconsistent handling of hyphens in keys.
+
+ To quote Morgan in bug 1526244:
+
+ the reason this is converted from "domain-id" to "domain_id" is
+ because of how we process/normalize data. The way we have to handle
+ specific data types for known columns requires avoiding "-" in the
+ actual python code since "-" is not valid for attributes in python
+ w/o significant use of "getattr" etc.
+
+ In short, historically we handle some things in conversions. The
+ use of "extras" has long been a poor design choice that leads to
+ odd/strange inconsistent behaviors because of other choices made in
+ handling data from within the body. (In many cases we convert from
+ "-" to "_" throughout openstack)
+
+ Source: https://bugs.launchpad.net/keystone/+bug/1526244/comments/9
+
+ """
+ # Authenticate with a user that has the domain admin role
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ # Show that we can read a normal user without any surprises.
+ r = self.get(
+ '/users/%s' % self.just_a_user['id'],
+ auth=self.auth,
+ expected_status=http_client.OK)
+ self.assertValidUserResponse(r)
+
+ # We don't normalize query string keys, so both of these result in a
+ # 403, because we didn't specify a domain_id query string in either
+ # case, and we explicitly require one (it doesn't matter what
+ # 'domain-id' value you use).
+ self.get(
+ '/users?domain-id=%s' % self.domainA['id'],
+ auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+ self.get(
+ '/users?domain-id=%s' % self.domainB['id'],
+ auth=self.auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ # If we try updating the user's 'domain_id' by specifying a
+ # 'domain-id', then it'll be stored into extras rather than normalized,
+ # and the user's actual 'domain_id' is not affected.
+ r = self.patch(
+ '/users/%s' % self.just_a_user['id'],
+ auth=self.auth,
+ body={'user': {'domain-id': self.domainB['id']}},
+ expected_status=http_client.OK)
+ self.assertEqual(self.domainB['id'], r.json['user']['domain-id'])
+ self.assertEqual(self.domainA['id'], r.json['user']['domain_id'])
+ self.assertNotEqual(self.domainB['id'], self.just_a_user['domain_id'])
+ self.assertValidUserResponse(r, self.just_a_user)
+
+ # Finally, show that we can create a new user without any surprises.
+ # But if we specify a 'domain-id' instead of a 'domain_id', we get a
+ # Forbidden response because we fail a policy check before
+ # normalization occurs.
+ user_ref = unit.new_user_ref(domain_id=self.domainA['id'])
+ r = self.post(
+ '/users',
+ auth=self.auth,
+ body={'user': user_ref},
+ expected_status=http_client.CREATED)
+ self.assertValidUserResponse(r, ref=user_ref)
+ user_ref['domain-id'] = user_ref.pop('domain_id')
+ self.post(
+ '/users',
+ auth=self.auth,
+ body={'user': user_ref},
+ expected_status=exception.ForbiddenAction.code)
+
def test_user_management_by_cloud_admin(self):
# Test users management with a cloud admin. This user should
# be able to manage users in any domain.
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
- domain_id=self.admin_domain['id'])
+ project_id=self.admin_project['id'])
self._test_user_management(self.domainA['id'])
@@ -824,7 +948,7 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
- domain_id=self.admin_domain['id'])
+ project_id=self.admin_project['id'])
# Check whether cloud admin can operate a domain
# other than its own domain or not
@@ -858,10 +982,56 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
- domain_id=self.admin_domain['id'])
+ project_id=self.admin_project['id'])
self._test_grants('domains', self.domainA['id'])
+ def test_domain_grants_by_cloud_admin_for_domain_specific_role(self):
+ # Test domain grants with a cloud admin. This user should be
+ # able to manage domain roles on any domain.
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ project_id=self.admin_project['id'])
+
+ self._test_grants('domains', self.domainA['id'],
+ role_domain_id=self.domainB['id'])
+
+ def test_domain_grants_by_non_admin_for_domain_specific_role(self):
+ # A non-admin shouldn't be able to do anything
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_grants('domains', self.domainA['id'],
+ role_domain_id=self.domainA['id'],
+ expected=exception.ForbiddenAction.code)
+ self._test_grants('domains', self.domainA['id'],
+ role_domain_id=self.domainB['id'],
+ expected=exception.ForbiddenAction.code)
+
+ def test_domain_grants_by_domain_admin_for_domain_specific_role(self):
+ # Authenticate with a user that does have the domain admin role,
+ # should not be able to assign a domain_specific role from another
+ # domain
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_grants('domains', self.domainA['id'],
+ role_domain_id=self.domainB['id'],
+ # List status will always be OK, since we are not
+ # granting/checking/deleting assignments
+ list_status_OK=True,
+ expected=exception.ForbiddenAction.code)
+
+ # They should be able to assign a domain specific role from the same
+ # domain
+ self._test_grants('domains', self.domainA['id'],
+ role_domain_id=self.domainA['id'])
+
def test_project_grants(self):
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
@@ -890,11 +1060,67 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self._test_grants('projects', self.project['id'])
+ def test_project_grants_by_non_admin_for_domain_specific_role(self):
+ # A non-admin shouldn't be able to do anything
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ project_id=self.project['id'])
+
+ self._test_grants('projects', self.project['id'],
+ role_domain_id=self.domainA['id'],
+ expected=exception.ForbiddenAction.code)
+ self._test_grants('projects', self.project['id'],
+ role_domain_id=self.domainB['id'],
+ expected=exception.ForbiddenAction.code)
+
+ def test_project_grants_by_project_admin_for_domain_specific_role(self):
+ # Authenticate with a user that does have the project admin role,
+ # should not be able to assign a domain_specific role from another
+ # domain
+ self.auth = self.build_authentication_request(
+ user_id=self.project_admin_user['id'],
+ password=self.project_admin_user['password'],
+ project_id=self.project['id'])
+
+ self._test_grants('projects', self.project['id'],
+ role_domain_id=self.domainB['id'],
+ # List status will always be OK, since we are not
+ # granting/checking/deleting assignments
+ list_status_OK=True,
+ expected=exception.ForbiddenAction.code)
+
+ # They should be able to assign a domain specific role from the same
+ # domain
+ self._test_grants('projects', self.project['id'],
+ role_domain_id=self.domainA['id'])
+
+ def test_project_grants_by_domain_admin_for_domain_specific_role(self):
+ # Authenticate with a user that does have the domain admin role,
+ # should not be able to assign a domain_specific role from another
+ # domain
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._test_grants('projects', self.project['id'],
+ role_domain_id=self.domainB['id'],
+ # List status will always be OK, since we are not
+ # granting/checking/deleting assignments
+ list_status_OK=True,
+ expected=exception.ForbiddenAction.code)
+
+ # They should be able to assign a domain specific role from the same
+ # domain
+ self._test_grants('projects', self.project['id'],
+ role_domain_id=self.domainA['id'])
+
def test_cloud_admin_list_assignments_of_domain(self):
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
- domain_id=self.admin_domain['id'])
+ project_id=self.admin_project['id'])
collection_url = self.build_role_assignment_query_url(
domain_id=self.domainA['id'])
@@ -968,7 +1194,7 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
- domain_id=self.admin_domain['id'])
+ project_id=self.admin_project['id'])
collection_url = self.build_role_assignment_query_url(
project_id=self.project['id'])
@@ -990,7 +1216,33 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.assertRoleAssignmentInListResponse(r, project_admin_entity)
self.assertRoleAssignmentInListResponse(r, project_user_entity)
- @unit.utils.wip('waiting on bug #1437407')
+ def test_admin_project_list_assignments_of_project(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.project_admin_user['id'],
+ password=self.project_admin_user['password'],
+ project_id=self.project['id'])
+
+ collection_url = self.build_role_assignment_query_url(
+ project_id=self.project['id'])
+ r = self.get(collection_url, auth=self.auth)
+ self.assertValidRoleAssignmentListResponse(
+ r, expected_length=2, resource_url=collection_url)
+
+ project_admin_entity = self.build_role_assignment_entity(
+ project_id=self.project['id'],
+ user_id=self.project_admin_user['id'],
+ role_id=self.admin_role['id'],
+ inherited_to_projects=False)
+ project_user_entity = self.build_role_assignment_entity(
+ project_id=self.project['id'],
+ user_id=self.just_a_user['id'],
+ role_id=self.role['id'],
+ inherited_to_projects=False)
+
+ self.assertRoleAssignmentInListResponse(r, project_admin_entity)
+ self.assertRoleAssignmentInListResponse(r, project_user_entity)
+
+ @utils.wip('waiting on bug #1437407')
def test_domain_admin_list_assignments_of_project(self):
self.auth = self.build_authentication_request(
user_id=self.domain_admin_user['id'],
@@ -1017,6 +1269,53 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.assertRoleAssignmentInListResponse(r, project_admin_entity)
self.assertRoleAssignmentInListResponse(r, project_user_entity)
+ def test_domain_admin_list_assignment_tree(self):
+ # Add a child project to the standard test data
+ sub_project = unit.new_project_ref(domain_id=self.domainA['id'],
+ parent_id=self.project['id'])
+ self.resource_api.create_project(sub_project['id'], sub_project)
+ self.assignment_api.create_grant(self.role['id'],
+ user_id=self.just_a_user['id'],
+ project_id=sub_project['id'])
+
+ collection_url = self.build_role_assignment_query_url(
+ project_id=self.project['id'])
+ collection_url += '&include_subtree=True'
+
+ # The domain admin should be able to list the assignment tree
+ auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ r = self.get(collection_url, auth=auth)
+ self.assertValidRoleAssignmentListResponse(
+ r, expected_length=3, resource_url=collection_url)
+
+ # A project admin should not be able to
+ auth = self.build_authentication_request(
+ user_id=self.project_admin_user['id'],
+ password=self.project_admin_user['password'],
+ project_id=self.project['id'])
+
+ r = self.get(collection_url, auth=auth,
+ expected_status=http_client.FORBIDDEN)
+
+        # Neither should a domain admin from a different domain
+ domainB_admin_user = unit.create_user(
+ self.identity_api,
+ domain_id=self.domainB['id'])
+ self.assignment_api.create_grant(self.admin_role['id'],
+ user_id=domainB_admin_user['id'],
+ domain_id=self.domainB['id'])
+ auth = self.build_authentication_request(
+ user_id=domainB_admin_user['id'],
+ password=domainB_admin_user['password'],
+ domain_id=self.domainB['id'])
+
+ r = self.get(collection_url, auth=auth,
+ expected_status=http_client.FORBIDDEN)
+
def test_domain_user_list_assignments_of_project_failed(self):
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
@@ -1040,7 +1339,23 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
- domain_id=self.admin_domain['id'])
+ project_id=self.admin_project['id'])
+
+ self._test_domain_management()
+
+ def test_admin_project(self):
+ self.auth = self.build_authentication_request(
+ user_id=self.project_admin_user['id'],
+ password=self.project_admin_user['password'],
+ project_id=self.project['id'])
+
+ self._test_domain_management(
+ expected=exception.ForbiddenAction.code)
+
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ project_id=self.admin_project['id'])
self._test_domain_management()
@@ -1050,16 +1365,15 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
password=self.domain_admin_user['password'],
domain_id=self.domainA['id'])
entity_url = '/domains/%s' % self.domainA['id']
- self.get(entity_url, auth=self.auth, expected_status=200)
+ self.get(entity_url, auth=self.auth)
def test_list_user_credentials(self):
- self.credential_user = self.new_credential_ref(self.just_a_user['id'])
- self.credential_api.create_credential(self.credential_user['id'],
- self.credential_user)
- self.credential_admin = self.new_credential_ref(
- self.cloud_admin_user['id'])
- self.credential_api.create_credential(self.credential_admin['id'],
- self.credential_admin)
+ credential_user = unit.new_credential_ref(self.just_a_user['id'])
+ self.credential_api.create_credential(credential_user['id'],
+ credential_user)
+ credential_admin = unit.new_credential_ref(self.cloud_admin_user['id'])
+ self.credential_api.create_credential(credential_admin['id'],
+ credential_admin)
self.auth = self.build_authentication_request(
user_id=self.just_a_user['id'],
@@ -1075,9 +1389,8 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
def test_get_and_delete_ec2_credentials(self):
"""Tests getting and deleting ec2 credentials through the ec2 API."""
- another_user = self.new_user_ref(domain_id=self.domainA['id'])
- password = another_user['password']
- another_user = self.identity_api.create_user(another_user)
+ another_user = unit.create_user(self.identity_api,
+ domain_id=self.domainA['id'])
# create a credential for just_a_user
just_user_auth = self.build_authentication_request(
@@ -1091,7 +1404,7 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
# another normal user can't get the credential
another_user_auth = self.build_authentication_request(
user_id=another_user['id'],
- password=password)
+ password=another_user['password'])
another_user_url = '/users/%s/credentials/OS-EC2/%s' % (
another_user['id'], r.result['credential']['access'])
self.get(another_user_url, auth=another_user_auth,
@@ -1160,7 +1473,26 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
admin_auth = self.build_authentication_request(
user_id=self.cloud_admin_user['id'],
password=self.cloud_admin_user['password'],
- domain_id=self.admin_domain['id'])
+ project_id=self.admin_project['id'])
+ admin_token = self.get_requested_token(admin_auth)
+
+ user_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'])
+ user_token = self.get_requested_token(user_auth)
+
+ self.get('/auth/tokens', token=admin_token,
+ headers={'X-Subject-Token': user_token})
+
+ def test_admin_project_validate_user_token(self):
+ # An admin can validate a user's token.
+ # This is GET /v3/auth/tokens
+
+ admin_auth = self.build_authentication_request(
+ user_id=self.project_admin_user['id'],
+ password=self.project_admin_user['password'],
+ project_id=self.project['id'])
+
admin_token = self.get_requested_token(admin_auth)
user_auth = self.build_authentication_request(
@@ -1182,7 +1514,8 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
token = self.get_requested_token(auth)
self.head('/auth/tokens', token=token,
- headers={'X-Subject-Token': token}, expected_status=200)
+ headers={'X-Subject-Token': token},
+ expected_status=http_client.OK)
def test_user_check_user_token(self):
# A user can check one of their own tokens.
@@ -1195,7 +1528,8 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
token2 = self.get_requested_token(auth)
self.head('/auth/tokens', token=token1,
- headers={'X-Subject-Token': token2}, expected_status=200)
+ headers={'X-Subject-Token': token2},
+ expected_status=http_client.OK)
def test_user_check_other_user_token_rejected(self):
# A user cannot check another user's token.
@@ -1231,7 +1565,8 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
user_token = self.get_requested_token(user_auth)
self.head('/auth/tokens', token=admin_token,
- headers={'X-Subject-Token': user_token}, expected_status=200)
+ headers={'X-Subject-Token': user_token},
+ expected_status=http_client.OK)
def test_user_revoke_same_token(self):
# Given a non-admin user token, the token can be used to revoke
@@ -1294,3 +1629,149 @@ class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase,
self.delete('/auth/tokens', token=admin_token,
headers={'X-Subject-Token': user_token})
+
+ def test_user_with_a_role_get_project(self):
+ user_auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ project_id=self.project['id'])
+
+ # Test user can get project for one they have a role in
+ self.get('/projects/%s' % self.project['id'], auth=user_auth)
+
+ # Test user can not get project for one they don't have a role in,
+ # even if they have a role on another project
+ project2 = unit.new_project_ref(domain_id=self.domainA['id'])
+ self.resource_api.create_project(project2['id'], project2)
+ self.get('/projects/%s' % project2['id'], auth=user_auth,
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_project_admin_get_project(self):
+ admin_auth = self.build_authentication_request(
+ user_id=self.project_admin_user['id'],
+ password=self.project_admin_user['password'],
+ project_id=self.project['id'])
+
+ resp = self.get('/projects/%s' % self.project['id'], auth=admin_auth)
+ self.assertEqual(self.project['id'],
+ jsonutils.loads(resp.body)['project']['id'])
+
+ def test_role_management_no_admin_no_rights(self):
+ # A non-admin domain user shouldn't be able to manipulate roles
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._role_management_cases(expected=exception.ForbiddenAction.code)
+
+ # ...and nor should non-admin project user
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ project_id=self.project['id'])
+
+ self._role_management_cases(expected=exception.ForbiddenAction.code)
+
+ def test_role_management_with_project_admin(self):
+ # A project admin user should be able to get and list, but not be able
+ # to create/update/delete global roles
+ self.auth = self.build_authentication_request(
+ user_id=self.project_admin_user['id'],
+ password=self.project_admin_user['password'],
+ project_id=self.project['id'])
+
+ self._role_management_cases(read_status_OK=True,
+ expected=exception.ForbiddenAction.code)
+
+ def test_role_management_with_domain_admin(self):
+ # A domain admin user should be able to get and list, but not be able
+ # to create/update/delete global roles
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._role_management_cases(read_status_OK=True,
+ expected=exception.ForbiddenAction.code)
+
+ def test_role_management_with_cloud_admin(self):
+ # A cloud admin user should have rights to manipulate global roles
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ project_id=self.admin_project['id'])
+
+ self._role_management_cases()
+
+ def test_domain_role_management_no_admin_no_rights(self):
+ # A non-admin domain user shouldn't be able to manipulate domain roles
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._domain_role_management_cases(
+ self.domainA['id'], expected=exception.ForbiddenAction.code)
+
+ # ...and nor should non-admin project user
+ self.auth = self.build_authentication_request(
+ user_id=self.just_a_user['id'],
+ password=self.just_a_user['password'],
+ project_id=self.project['id'])
+
+ self._domain_role_management_cases(
+ self.domainA['id'], expected=exception.ForbiddenAction.code)
+
+ def test_domain_role_management_with_cloud_admin(self):
+ # A cloud admin user should have rights to manipulate domain roles
+ self.auth = self.build_authentication_request(
+ user_id=self.cloud_admin_user['id'],
+ password=self.cloud_admin_user['password'],
+ project_id=self.admin_project['id'])
+
+ self._domain_role_management_cases(self.domainA['id'])
+
+ def test_domain_role_management_with_domain_admin(self):
+ # A domain admin user should only be able to manipulate the domain
+ # specific roles in their own domain
+ self.auth = self.build_authentication_request(
+ user_id=self.domainB_admin_user['id'],
+ password=self.domainB_admin_user['password'],
+ domain_id=self.domainB['id'])
+
+ # Try to access the domain specific roles in another domain
+ self._domain_role_management_cases(
+ self.domainA['id'], expected=exception.ForbiddenAction.code)
+
+ # ...but they should be able to work with those in their own domain
+ self.auth = self.build_authentication_request(
+ user_id=self.domain_admin_user['id'],
+ password=self.domain_admin_user['password'],
+ domain_id=self.domainA['id'])
+
+ self._domain_role_management_cases(self.domainA['id'])
+
+ def test_domain_role_management_with_project_admin(self):
+ # A project admin user should not have access to domain specific roles
+ # in another domain. They should be able to get and list domain
+ # specific roles from their own domain, but not be able to create,
+ # update or delete them.
+ self.auth = self.build_authentication_request(
+ user_id=self.project_adminB_user['id'],
+ password=self.project_adminB_user['password'],
+ project_id=self.projectB['id'])
+
+ # Try to access the domain specific roles in another domain
+ self._domain_role_management_cases(
+ self.domainA['id'], expected=exception.ForbiddenAction.code)
+
+ # ...but they should be able to work with those in their own domain
+ self.auth = self.build_authentication_request(
+ user_id=self.project_admin_user['id'],
+ password=self.project_admin_user['password'],
+ project_id=self.project['id'])
+
+ self._domain_role_management_cases(
+ self.domainA['id'], read_status_OK=True,
+ expected=exception.ForbiddenAction.code)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_resource.py b/keystone-moon/keystone/tests/unit/test_v3_resource.py
new file mode 100644
index 00000000..f54fcb57
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_resource.py
@@ -0,0 +1,1434 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import uuid
+
+from oslo_config import cfg
+from six.moves import http_client
+from six.moves import range
+from testtools import matchers
+
+from keystone.common import controller
+from keystone import exception
+from keystone.tests import unit
+from keystone.tests.unit import test_v3
+from keystone.tests.unit import utils as test_utils
+
+
+CONF = cfg.CONF
+
+
+class ResourceTestCase(test_v3.RestfulTestCase,
+ test_v3.AssignmentTestMixin):
+ """Test domains and projects."""
+
+ # Domain CRUD tests
+
+ def test_create_domain(self):
+ """Call ``POST /domains``."""
+ ref = unit.new_domain_ref()
+ r = self.post(
+ '/domains',
+ body={'domain': ref})
+ return self.assertValidDomainResponse(r, ref)
+
+ def test_create_domain_case_sensitivity(self):
+ """Call `POST /domains`` twice with upper() and lower() cased name."""
+ ref = unit.new_domain_ref()
+
+ # ensure the name is lowercase
+ ref['name'] = ref['name'].lower()
+ r = self.post(
+ '/domains',
+ body={'domain': ref})
+ self.assertValidDomainResponse(r, ref)
+
+ # ensure the name is uppercase
+ ref['name'] = ref['name'].upper()
+ r = self.post(
+ '/domains',
+ body={'domain': ref})
+ self.assertValidDomainResponse(r, ref)
+
+ def test_create_domain_bad_request(self):
+ """Call ``POST /domains``."""
+ self.post('/domains', body={'domain': {}},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_create_domain_unsafe(self):
+ """Call ``POST /domains with unsafe names``."""
+ unsafe_name = 'i am not / safe'
+
+ self.config_fixture.config(group='resource',
+ domain_name_url_safe='off')
+ ref = unit.new_domain_ref(name=unsafe_name)
+ self.post(
+ '/domains',
+ body={'domain': ref})
+
+ for config_setting in ['new', 'strict']:
+ self.config_fixture.config(group='resource',
+ domain_name_url_safe=config_setting)
+ ref = unit.new_domain_ref(name=unsafe_name)
+ self.post(
+ '/domains',
+ body={'domain': ref},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_create_domain_unsafe_default(self):
+ """Check default for unsafe names for ``POST /domains``."""
+ unsafe_name = 'i am not / safe'
+
+ # By default, we should be able to create unsafe names
+ ref = unit.new_domain_ref(name=unsafe_name)
+ self.post(
+ '/domains',
+ body={'domain': ref})
+
+ def test_create_domain_creates_is_domain_project(self):
+ """Check a project that acts as a domain is created.
+
+ Call ``POST /domains``.
+ """
+ # Create a new domain
+ domain_ref = unit.new_domain_ref()
+ r = self.post('/domains', body={'domain': domain_ref})
+ self.assertValidDomainResponse(r, domain_ref)
+
+ # Retrieve its correspondent project
+ r = self.get('/projects/%(project_id)s' % {
+ 'project_id': r.result['domain']['id']})
+ self.assertValidProjectResponse(r)
+
+ # The created project has is_domain flag as True
+ self.assertTrue(r.result['project']['is_domain'])
+
+ # And its parent_id and domain_id attributes are equal
+ self.assertIsNone(r.result['project']['parent_id'])
+ self.assertIsNone(r.result['project']['domain_id'])
+
+ def test_create_is_domain_project_creates_domain(self):
+ """Call ``POST /projects`` is_domain and check a domain is created."""
+ # Create a new project that acts as a domain
+ project_ref = unit.new_project_ref(domain_id=None, is_domain=True)
+ r = self.post('/projects', body={'project': project_ref})
+ self.assertValidProjectResponse(r)
+
+ # Retrieve its correspondent domain
+ r = self.get('/domains/%(domain_id)s' % {
+ 'domain_id': r.result['project']['id']})
+ self.assertValidDomainResponse(r)
+ self.assertIsNotNone(r.result['domain'])
+
+ def test_list_domains(self):
+ """Call ``GET /domains``."""
+ resource_url = '/domains'
+ r = self.get(resource_url)
+ self.assertValidDomainListResponse(r, ref=self.domain,
+ resource_url=resource_url)
+
+ def test_get_domain(self):
+ """Call ``GET /domains/{domain_id}``."""
+ r = self.get('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain_id})
+ self.assertValidDomainResponse(r, self.domain)
+
+ def test_update_domain(self):
+ """Call ``PATCH /domains/{domain_id}``."""
+ ref = unit.new_domain_ref()
+ del ref['id']
+ r = self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain_id},
+ body={'domain': ref})
+ self.assertValidDomainResponse(r, ref)
+
+ def test_update_domain_unsafe(self):
+ """Call ``POST /domains/{domain_id} with unsafe names``."""
+ unsafe_name = 'i am not / safe'
+
+ self.config_fixture.config(group='resource',
+ domain_name_url_safe='off')
+ ref = unit.new_domain_ref(name=unsafe_name)
+ del ref['id']
+ self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain_id},
+ body={'domain': ref})
+
+ unsafe_name = 'i am still not / safe'
+ for config_setting in ['new', 'strict']:
+ self.config_fixture.config(group='resource',
+ domain_name_url_safe=config_setting)
+ ref = unit.new_domain_ref(name=unsafe_name)
+ del ref['id']
+ self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain_id},
+ body={'domain': ref},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_update_domain_unsafe_default(self):
+ """Check default for unsafe names for ``POST /domains``."""
+ unsafe_name = 'i am not / safe'
+
+ # By default, we should be able to create unsafe names
+ ref = unit.new_domain_ref(name=unsafe_name)
+ del ref['id']
+ self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain_id},
+ body={'domain': ref})
+
+ def test_update_domain_updates_is_domain_project(self):
+ """Check the project that acts as a domain is updated.
+
+ Call ``PATCH /domains``.
+ """
+ # Create a new domain
+ domain_ref = unit.new_domain_ref()
+ r = self.post('/domains', body={'domain': domain_ref})
+ self.assertValidDomainResponse(r, domain_ref)
+
+ # Disable it
+ self.patch('/domains/%s' % r.result['domain']['id'],
+ body={'domain': {'enabled': False}})
+
+ # Retrieve its correspondent project
+ r = self.get('/projects/%(project_id)s' % {
+ 'project_id': r.result['domain']['id']})
+ self.assertValidProjectResponse(r)
+
+ # The created project is disabled as well
+ self.assertFalse(r.result['project']['enabled'])
+
+ def test_disable_domain(self):
+ """Call ``PATCH /domains/{domain_id}`` (set enabled=False)."""
+ # Create a 2nd set of entities in a 2nd domain
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+
+ project2 = unit.new_project_ref(domain_id=domain2['id'])
+ self.resource_api.create_project(project2['id'], project2)
+
+ user2 = unit.create_user(self.identity_api,
+ domain_id=domain2['id'],
+ project_id=project2['id'])
+
+ self.assignment_api.add_user_to_project(project2['id'],
+ user2['id'])
+
+ # First check a user in that domain can authenticate..
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': user2['id'],
+ 'password': user2['password']
+ },
+ 'tenantId': project2['id']
+ }
+ }
+ self.admin_request(
+ path='/v2.0/tokens', method='POST', body=body)
+
+ auth_data = self.build_authentication_request(
+ user_id=user2['id'],
+ password=user2['password'],
+ project_id=project2['id'])
+ self.v3_create_token(auth_data)
+
+ # Now disable the domain
+ domain2['enabled'] = False
+ r = self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': domain2['id']},
+ body={'domain': {'enabled': False}})
+ self.assertValidDomainResponse(r, domain2)
+
+ # Make sure the user can no longer authenticate, via
+ # either API
+ body = {
+ 'auth': {
+ 'passwordCredentials': {
+ 'userId': user2['id'],
+ 'password': user2['password']
+ },
+ 'tenantId': project2['id']
+ }
+ }
+ self.admin_request(
+ path='/v2.0/tokens', method='POST', body=body,
+ expected_status=http_client.UNAUTHORIZED)
+
+ # Try looking up in v3 by name and id
+ auth_data = self.build_authentication_request(
+ user_id=user2['id'],
+ password=user2['password'],
+ project_id=project2['id'])
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+
+ auth_data = self.build_authentication_request(
+ username=user2['name'],
+ user_domain_id=domain2['id'],
+ password=user2['password'],
+ project_id=project2['id'])
+ self.v3_create_token(auth_data,
+ expected_status=http_client.UNAUTHORIZED)
+
+ def test_delete_enabled_domain_fails(self):
+ """Call ``DELETE /domains/{domain_id}`` (when domain enabled)."""
+ # Try deleting an enabled domain, which should fail
+ self.delete('/domains/%(domain_id)s' % {
+ 'domain_id': self.domain['id']},
+ expected_status=exception.ForbiddenAction.code)
+
+ def test_delete_domain(self):
+ """Call ``DELETE /domains/{domain_id}``.
+
+ The sample data set up already has a user and project that is part of
+ self.domain. Additionally we will create a group and a credential
+ within it. Since the user we will authenticate with is in this domain,
+ we create another set of entities in a second domain. Deleting this
+ second domain should delete all these new entities. In addition,
+ all the entities in the regular self.domain should be unaffected
+ by the delete.
+
+ Test Plan:
+
+ - Create domain2 and a 2nd set of entities
+ - Disable domain2
+ - Delete domain2
+ - Check entities in domain2 have been deleted
+ - Check entities in self.domain are unaffected
+
+ """
+ # Create a group and a credential in the main domain
+ group = unit.new_group_ref(domain_id=self.domain_id)
+ group = self.identity_api.create_group(group)
+
+ credential = unit.new_credential_ref(user_id=self.user['id'],
+ project_id=self.project_id)
+ self.credential_api.create_credential(credential['id'], credential)
+
+ # Create a 2nd set of entities in a 2nd domain
+ domain2 = unit.new_domain_ref()
+ self.resource_api.create_domain(domain2['id'], domain2)
+
+ project2 = unit.new_project_ref(domain_id=domain2['id'])
+ project2 = self.resource_api.create_project(project2['id'], project2)
+
+ user2 = unit.new_user_ref(domain_id=domain2['id'],
+ project_id=project2['id'])
+ user2 = self.identity_api.create_user(user2)
+
+ group2 = unit.new_group_ref(domain_id=domain2['id'])
+ group2 = self.identity_api.create_group(group2)
+
+ credential2 = unit.new_credential_ref(user_id=user2['id'],
+ project_id=project2['id'])
+ self.credential_api.create_credential(credential2['id'],
+ credential2)
+
+ # Now disable the new domain and delete it
+ domain2['enabled'] = False
+ r = self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': domain2['id']},
+ body={'domain': {'enabled': False}})
+ self.assertValidDomainResponse(r, domain2)
+ self.delete('/domains/%(domain_id)s' % {'domain_id': domain2['id']})
+
+ # Check all the domain2 relevant entities are gone
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain2['id'])
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ project2['id'])
+ self.assertRaises(exception.GroupNotFound,
+ self.identity_api.get_group,
+ group2['id'])
+ self.assertRaises(exception.UserNotFound,
+ self.identity_api.get_user,
+ user2['id'])
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ credential2['id'])
+
+ # ...and that all self.domain entities are still here
+ r = self.resource_api.get_domain(self.domain['id'])
+ self.assertDictEqual(self.domain, r)
+ r = self.resource_api.get_project(self.project['id'])
+ self.assertDictEqual(self.project, r)
+ r = self.identity_api.get_group(group['id'])
+ self.assertDictEqual(group, r)
+ r = self.identity_api.get_user(self.user['id'])
+ self.user.pop('password')
+ self.assertDictEqual(self.user, r)
+ r = self.credential_api.get_credential(credential['id'])
+ self.assertDictEqual(credential, r)
+
+ def test_delete_domain_deletes_is_domain_project(self):
+ """Check the project that acts as a domain is deleted.
+
+ Call ``DELETE /domains``.
+ """
+ # Create a new domain
+ domain_ref = unit.new_domain_ref()
+ r = self.post('/domains', body={'domain': domain_ref})
+ self.assertValidDomainResponse(r, domain_ref)
+
+ # Retrieve its correspondent project
+ self.get('/projects/%(project_id)s' % {
+ 'project_id': r.result['domain']['id']})
+
+ # Delete the domain
+ self.patch('/domains/%s' % r.result['domain']['id'],
+ body={'domain': {'enabled': False}})
+ self.delete('/domains/%s' % r.result['domain']['id'])
+
+ # The created project is deleted as well
+ self.get('/projects/%(project_id)s' % {
+ 'project_id': r.result['domain']['id']}, expected_status=404)
+
+ def test_delete_default_domain(self):
+ # Need to disable it first.
+ self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': CONF.identity.default_domain_id},
+ body={'domain': {'enabled': False}})
+
+ self.delete(
+ '/domains/%(domain_id)s' % {
+ 'domain_id': CONF.identity.default_domain_id})
+
+ def test_token_revoked_once_domain_disabled(self):
+ """Test token from a disabled domain has been invalidated.
+
+ Test that a token that was valid for an enabled domain
+ becomes invalid once that domain is disabled.
+
+ """
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+
+ user2 = unit.create_user(self.identity_api,
+ domain_id=domain['id'])
+
+ # build a request body
+ auth_body = self.build_authentication_request(
+ user_id=user2['id'],
+ password=user2['password'])
+
+ # sends a request for the user's token
+ token_resp = self.post('/auth/tokens', body=auth_body)
+
+ subject_token = token_resp.headers.get('x-subject-token')
+
+ # validates the returned token and it should be valid.
+ self.head('/auth/tokens',
+ headers={'x-subject-token': subject_token},
+ expected_status=http_client.OK)
+
+ # now disable the domain
+ domain['enabled'] = False
+ url = "/domains/%(domain_id)s" % {'domain_id': domain['id']}
+ self.patch(url,
+ body={'domain': {'enabled': False}})
+
+ # validates the same token again and it should be 'not found'
+ # as the domain has already been disabled.
+ self.head('/auth/tokens',
+ headers={'x-subject-token': subject_token},
+ expected_status=http_client.NOT_FOUND)
+
+ def test_delete_domain_hierarchy(self):
+ """Call ``DELETE /domains/{domain_id}``."""
+ domain = unit.new_domain_ref()
+ self.resource_api.create_domain(domain['id'], domain)
+
+ root_project = unit.new_project_ref(domain_id=domain['id'])
+ root_project = self.resource_api.create_project(root_project['id'],
+ root_project)
+
+ leaf_project = unit.new_project_ref(
+ domain_id=domain['id'],
+ parent_id=root_project['id'])
+ self.resource_api.create_project(leaf_project['id'], leaf_project)
+
+ # Need to disable it first.
+ self.patch('/domains/%(domain_id)s' % {
+ 'domain_id': domain['id']},
+ body={'domain': {'enabled': False}})
+
+ self.delete(
+ '/domains/%(domain_id)s' % {
+ 'domain_id': domain['id']})
+
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.get_domain,
+ domain['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ root_project['id'])
+
+ self.assertRaises(exception.ProjectNotFound,
+ self.resource_api.get_project,
+ leaf_project['id'])
+
+ def test_forbid_operations_on_federated_domain(self):
+ """Make sure one cannot operate on federated domain.
+
+ This includes operations like create, update, delete
+ on domain identified by id and name where difference variations of
+ id 'Federated' are used.
+
+ """
+ def create_domains():
+ for variation in ('Federated', 'FEDERATED',
+ 'federated', 'fEderated'):
+ domain = unit.new_domain_ref()
+ domain['id'] = variation
+ yield domain
+
+ for domain in create_domains():
+ self.assertRaises(
+ AssertionError, self.resource_api.create_domain,
+ domain['id'], domain)
+ self.assertRaises(
+ AssertionError, self.resource_api.update_domain,
+ domain['id'], domain)
+ self.assertRaises(
+ exception.DomainNotFound, self.resource_api.delete_domain,
+ domain['id'])
+
+ # swap 'name' with 'id' and try again, expecting the request to
+ # gracefully fail
+ domain['id'], domain['name'] = domain['name'], domain['id']
+ self.assertRaises(
+ AssertionError, self.resource_api.create_domain,
+ domain['id'], domain)
+ self.assertRaises(
+ AssertionError, self.resource_api.update_domain,
+ domain['id'], domain)
+ self.assertRaises(
+ exception.DomainNotFound, self.resource_api.delete_domain,
+ domain['id'])
+
+ def test_forbid_operations_on_defined_federated_domain(self):
+ """Make sure one cannot operate on a user-defined federated domain.
+
+ This includes operations like create, update, delete.
+
+ """
+ non_default_name = 'beta_federated_domain'
+ self.config_fixture.config(group='federation',
+ federated_domain_name=non_default_name)
+ domain = unit.new_domain_ref(name=non_default_name)
+ self.assertRaises(AssertionError,
+ self.resource_api.create_domain,
+ domain['id'], domain)
+ self.assertRaises(exception.DomainNotFound,
+ self.resource_api.delete_domain,
+ domain['id'])
+ self.assertRaises(AssertionError,
+ self.resource_api.update_domain,
+ domain['id'], domain)
+
+ # Project CRUD tests
+
+ def test_list_projects(self):
+ """Call ``GET /projects``."""
+ resource_url = '/projects'
+ r = self.get(resource_url)
+ self.assertValidProjectListResponse(r, ref=self.project,
+ resource_url=resource_url)
+
+ def test_create_project(self):
+ """Call ``POST /projects``."""
+ ref = unit.new_project_ref(domain_id=self.domain_id)
+ r = self.post(
+ '/projects',
+ body={'project': ref})
+ self.assertValidProjectResponse(r, ref)
+
+ def test_create_project_bad_request(self):
+ """Call ``POST /projects``."""
+ self.post('/projects', body={'project': {}},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_create_project_invalid_domain_id(self):
+ """Call ``POST /projects``."""
+ ref = unit.new_project_ref(domain_id=uuid.uuid4().hex)
+ self.post('/projects', body={'project': ref},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_create_project_unsafe(self):
+ """Call ``POST /projects with unsafe names``."""
+ unsafe_name = 'i am not / safe'
+
+ self.config_fixture.config(group='resource',
+ project_name_url_safe='off')
+ ref = unit.new_project_ref(name=unsafe_name)
+ self.post(
+ '/projects',
+ body={'project': ref})
+
+ for config_setting in ['new', 'strict']:
+ self.config_fixture.config(group='resource',
+ project_name_url_safe=config_setting)
+ ref = unit.new_project_ref(name=unsafe_name)
+ self.post(
+ '/projects',
+ body={'project': ref},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_create_project_unsafe_default(self):
+ """Check default for unsafe names for ``POST /projects``."""
+ unsafe_name = 'i am not / safe'
+
+ # By default, we should be able to create unsafe names
+ ref = unit.new_project_ref(name=unsafe_name)
+ self.post(
+ '/projects',
+ body={'project': ref})
+
+ def test_create_project_with_parent_id_none_and_domain_id_none(self):
+ """Call ``POST /projects``."""
+ # Grant a domain role for the user
+ collection_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+ self.put(member_url)
+
+ # Create an authentication request for a domain scoped token
+ auth = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain_id)
+
+ # Without parent_id and domain_id passed as None, the domain_id should
+ # be normalized to the domain on the token, when using a domain
+ # scoped token.
+ ref = unit.new_project_ref()
+ r = self.post(
+ '/projects',
+ auth=auth,
+ body={'project': ref})
+ ref['domain_id'] = self.domain['id']
+ self.assertValidProjectResponse(r, ref)
+
+ def test_create_project_without_parent_id_and_without_domain_id(self):
+ """Call ``POST /projects``."""
+ # Grant a domain role for the user
+ collection_url = (
+ '/domains/%(domain_id)s/users/%(user_id)s/roles' % {
+ 'domain_id': self.domain_id,
+ 'user_id': self.user['id']})
+ member_url = '%(collection_url)s/%(role_id)s' % {
+ 'collection_url': collection_url,
+ 'role_id': self.role_id}
+ self.put(member_url)
+
+ # Create an authentication request for a domain scoped token
+ auth = self.build_authentication_request(
+ user_id=self.user['id'],
+ password=self.user['password'],
+ domain_id=self.domain_id)
+
+ # Without domain_id and parent_id, the domain_id should be
+ # normalized to the domain on the token, when using a domain
+ # scoped token.
+ ref = unit.new_project_ref()
+ r = self.post(
+ '/projects',
+ auth=auth,
+ body={'project': ref})
+ ref['domain_id'] = self.domain['id']
+ self.assertValidProjectResponse(r, ref)
+
+ @test_utils.wip('waiting for support for parent_id to imply domain_id')
+ def test_create_project_with_parent_id_and_no_domain_id(self):
+ """Call ``POST /projects``."""
+ # With only the parent_id, the domain_id should be
+ # normalized to the parent's domain_id
+ ref_child = unit.new_project_ref(parent_id=self.project['id'])
+
+ r = self.post(
+ '/projects',
+ body={'project': ref_child})
+ self.assertEqual(r.result['project']['domain_id'],
+ self.project['domain_id'])
+ ref_child['domain_id'] = self.domain['id']
+ self.assertValidProjectResponse(r, ref_child)
+
+ def _create_projects_hierarchy(self, hierarchy_size=1):
+ """Creates a single-branched project hierarchy with the specified size.
+
+ :param hierarchy_size: the desired hierarchy size, default is 1 -
+ a project with one child.
+
+ :returns: a list of the projects in the created hierarchy.
+
+ """
+ new_ref = unit.new_project_ref(domain_id=self.domain_id)
+ resp = self.post('/projects', body={'project': new_ref})
+
+ projects = [resp.result]
+
+ for i in range(hierarchy_size):
+ new_ref = unit.new_project_ref(
+ domain_id=self.domain_id,
+ parent_id=projects[i]['project']['id'])
+ resp = self.post('/projects',
+ body={'project': new_ref})
+ self.assertValidProjectResponse(resp, new_ref)
+
+ projects.append(resp.result)
+
+ return projects
+
+ def test_list_projects_filtering_by_parent_id(self):
+ """Call ``GET /projects?parent_id={project_id}``."""
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ # Add another child to projects[1] - it will be projects[3]
+ new_ref = unit.new_project_ref(
+ domain_id=self.domain_id,
+ parent_id=projects[1]['project']['id'])
+ resp = self.post('/projects',
+ body={'project': new_ref})
+ self.assertValidProjectResponse(resp, new_ref)
+
+ projects.append(resp.result)
+
+ # Query for projects[0] immediate children - it will
+ # be only projects[1]
+ r = self.get(
+ '/projects?parent_id=%(project_id)s' % {
+ 'project_id': projects[0]['project']['id']})
+ self.assertValidProjectListResponse(r)
+
+ projects_result = r.result['projects']
+ expected_list = [projects[1]['project']]
+
+ # projects[0] has projects[1] as child
+ self.assertEqual(expected_list, projects_result)
+
+ # Query for projects[1] immediate children - it will
+ # be projects[2] and projects[3]
+ r = self.get(
+ '/projects?parent_id=%(project_id)s' % {
+ 'project_id': projects[1]['project']['id']})
+ self.assertValidProjectListResponse(r)
+
+ projects_result = r.result['projects']
+ expected_list = [projects[2]['project'], projects[3]['project']]
+
+ # projects[1] has projects[2] and projects[3] as children
+ self.assertEqual(expected_list, projects_result)
+
+ # Query for projects[2] immediate children - it will be an empty list
+ r = self.get(
+ '/projects?parent_id=%(project_id)s' % {
+ 'project_id': projects[2]['project']['id']})
+ self.assertValidProjectListResponse(r)
+
+ projects_result = r.result['projects']
+ expected_list = []
+
+ # projects[2] has no child, projects_result must be an empty list
+ self.assertEqual(expected_list, projects_result)
+
+ def test_create_hierarchical_project(self):
+ """Call ``POST /projects``."""
+ self._create_projects_hierarchy()
+
+ def test_get_project(self):
+ """Call ``GET /projects/{project_id}``."""
+ r = self.get(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id})
+ self.assertValidProjectResponse(r, self.project)
+
+ def test_get_project_with_parents_as_list_with_invalid_id(self):
+ """Call ``GET /projects/{project_id}?parents_as_list``."""
+ self.get('/projects/%(project_id)s?parents_as_list' % {
+ 'project_id': None}, expected_status=http_client.NOT_FOUND)
+
+ self.get('/projects/%(project_id)s?parents_as_list' % {
+ 'project_id': uuid.uuid4().hex},
+ expected_status=http_client.NOT_FOUND)
+
+ def test_get_project_with_subtree_as_list_with_invalid_id(self):
+ """Call ``GET /projects/{project_id}?subtree_as_list``."""
+ self.get('/projects/%(project_id)s?subtree_as_list' % {
+ 'project_id': None}, expected_status=http_client.NOT_FOUND)
+
+ self.get('/projects/%(project_id)s?subtree_as_list' % {
+ 'project_id': uuid.uuid4().hex},
+ expected_status=http_client.NOT_FOUND)
+
+ def test_get_project_with_parents_as_ids(self):
+ """Call ``GET /projects/{project_id}?parents_as_ids``."""
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ # Query for projects[2] parents_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?parents_as_ids' % {
+ 'project_id': projects[2]['project']['id']})
+
+ self.assertValidProjectResponse(r, projects[2]['project'])
+ parents_as_ids = r.result['project']['parents']
+
+ # Assert parents_as_ids is a structured dictionary correctly
+ # representing the hierarchy. The request was made using projects[2]
+ # id, hence its parents should be projects[1], projects[0] and the
+ # is_domain_project, which is the root of the hierarchy. It should
+ # have the following structure:
+ # {
+ # projects[1]: {
+ # projects[0]: {
+ # is_domain_project: None
+ # }
+ # }
+ # }
+ is_domain_project_id = projects[0]['project']['domain_id']
+ expected_dict = {
+ projects[1]['project']['id']: {
+ projects[0]['project']['id']: {is_domain_project_id: None}
+ }
+ }
+ self.assertDictEqual(expected_dict, parents_as_ids)
+
+ # Query for projects[0] parents_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?parents_as_ids' % {
+ 'project_id': projects[0]['project']['id']})
+
+ self.assertValidProjectResponse(r, projects[0]['project'])
+ parents_as_ids = r.result['project']['parents']
+
+ # projects[0] has only the project that acts as a domain as parent
+ expected_dict = {
+ is_domain_project_id: None
+ }
+ self.assertDictEqual(expected_dict, parents_as_ids)
+
+ # Query for is_domain_project parents_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?parents_as_ids' % {
+ 'project_id': is_domain_project_id})
+
+ parents_as_ids = r.result['project']['parents']
+
+ # the project that acts as a domain has no parents, parents_as_ids
+ # must be None
+ self.assertIsNone(parents_as_ids)
+
+ def test_get_project_with_parents_as_list_with_full_access(self):
+ """``GET /projects/{project_id}?parents_as_list`` with full access.
+
+ Test plan:
+
+ - Create 'parent', 'project' and 'subproject' projects;
+ - Assign a user a role on each one of those projects;
+ - Check that calling parents_as_list on 'subproject' returns both
+ 'project' and 'parent'.
+
+ """
+ # Create the project hierarchy
+ parent, project, subproject = self._create_projects_hierarchy(2)
+
+ # Assign a role for the user on all the created projects
+ for proj in (parent, project, subproject):
+ self.put(self.build_role_assignment_link(
+ role_id=self.role_id, user_id=self.user_id,
+ project_id=proj['project']['id']))
+
+ # Make the API call
+ r = self.get('/projects/%(project_id)s?parents_as_list' %
+ {'project_id': subproject['project']['id']})
+ self.assertValidProjectResponse(r, subproject['project'])
+
+ # Assert only 'project' and 'parent' are in the parents list
+ self.assertIn(project, r.result['project']['parents'])
+ self.assertIn(parent, r.result['project']['parents'])
+ self.assertEqual(2, len(r.result['project']['parents']))
+
+ def test_get_project_with_parents_as_list_with_partial_access(self):
+ """``GET /projects/{project_id}?parents_as_list`` with partial access.
+
+ Test plan:
+
+ - Create 'parent', 'project' and 'subproject' projects;
+ - Assign a user a role on 'parent' and 'subproject';
+ - Check that calling parents_as_list on 'subproject' only returns
+ 'parent'.
+
+ """
+ # Create the project hierarchy
+ parent, project, subproject = self._create_projects_hierarchy(2)
+
+ # Assign a role for the user on parent and subproject
+ for proj in (parent, subproject):
+ self.put(self.build_role_assignment_link(
+ role_id=self.role_id, user_id=self.user_id,
+ project_id=proj['project']['id']))
+
+ # Make the API call
+ r = self.get('/projects/%(project_id)s?parents_as_list' %
+ {'project_id': subproject['project']['id']})
+ self.assertValidProjectResponse(r, subproject['project'])
+
+ # Assert only 'parent' is in the parents list
+ self.assertIn(parent, r.result['project']['parents'])
+ self.assertEqual(1, len(r.result['project']['parents']))
+
+ def test_get_project_with_parents_as_list_and_parents_as_ids(self):
+ """Attempt to list a project's parents as both a list and as IDs.
+
+ This uses ``GET /projects/{project_id}?parents_as_list&parents_as_ids``
+ which should fail with a Bad Request due to the conflicting query
+ strings.
+
+ """
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ self.get(
+ '/projects/%(project_id)s?parents_as_list&parents_as_ids' % {
+ 'project_id': projects[1]['project']['id']},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_list_project_is_domain_filter(self):
+ """Call ``GET /projects?is_domain=True/False``."""
+ # Get the initial number of projects, both acting as a domain as well
+ # as regular.
+ r = self.get('/projects?is_domain=True', expected_status=200)
+ initial_number_is_domain_true = len(r.result['projects'])
+ r = self.get('/projects?is_domain=False', expected_status=200)
+ initial_number_is_domain_false = len(r.result['projects'])
+
+ # Add some more projects acting as domains
+ new_is_domain_project = unit.new_project_ref(is_domain=True)
+ new_is_domain_project = self.resource_api.create_project(
+ new_is_domain_project['id'], new_is_domain_project)
+ new_is_domain_project2 = unit.new_project_ref(is_domain=True)
+ new_is_domain_project2 = self.resource_api.create_project(
+ new_is_domain_project2['id'], new_is_domain_project2)
+ number_is_domain_true = initial_number_is_domain_true + 2
+
+ r = self.get('/projects?is_domain=True', expected_status=200)
+ self.assertThat(r.result['projects'],
+ matchers.HasLength(number_is_domain_true))
+ self.assertIn(new_is_domain_project['id'],
+ [p['id'] for p in r.result['projects']])
+ self.assertIn(new_is_domain_project2['id'],
+ [p['id'] for p in r.result['projects']])
+
+ # Now add a regular project
+ new_regular_project = unit.new_project_ref(domain_id=self.domain_id)
+ new_regular_project = self.resource_api.create_project(
+ new_regular_project['id'], new_regular_project)
+ number_is_domain_false = initial_number_is_domain_false + 1
+
+ # Check we still have the same number of projects acting as domains
+ r = self.get('/projects?is_domain=True', expected_status=200)
+ self.assertThat(r.result['projects'],
+ matchers.HasLength(number_is_domain_true))
+
+ # Check the number of regular projects is correct
+ r = self.get('/projects?is_domain=False', expected_status=200)
+ self.assertThat(r.result['projects'],
+ matchers.HasLength(number_is_domain_false))
+ self.assertIn(new_regular_project['id'],
+ [p['id'] for p in r.result['projects']])
+
+ def test_list_project_is_domain_filter_default(self):
+        """Default project list should not see projects acting as domains."""
+ # Get the initial count of regular projects
+ r = self.get('/projects?is_domain=False', expected_status=200)
+ number_is_domain_false = len(r.result['projects'])
+
+ # Make sure we have at least one project acting as a domain
+ new_is_domain_project = unit.new_project_ref(is_domain=True)
+ new_is_domain_project = self.resource_api.create_project(
+ new_is_domain_project['id'], new_is_domain_project)
+
+ r = self.get('/projects', expected_status=200)
+ self.assertThat(r.result['projects'],
+ matchers.HasLength(number_is_domain_false))
+ self.assertNotIn(new_is_domain_project, r.result['projects'])
+
+ def test_get_project_with_subtree_as_ids(self):
+ """Call ``GET /projects/{project_id}?subtree_as_ids``.
+
+ This test creates a more complex hierarchy to test if the structured
+ dictionary returned by using the ``subtree_as_ids`` query param
+ correctly represents the hierarchy.
+
+ The hierarchy contains 5 projects with the following structure::
+
+ +--A--+
+ | |
+ +--B--+ C
+ | |
+ D E
+
+
+ """
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ # Add another child to projects[0] - it will be projects[3]
+ new_ref = unit.new_project_ref(
+ domain_id=self.domain_id,
+ parent_id=projects[0]['project']['id'])
+ resp = self.post('/projects',
+ body={'project': new_ref})
+ self.assertValidProjectResponse(resp, new_ref)
+ projects.append(resp.result)
+
+ # Add another child to projects[1] - it will be projects[4]
+ new_ref = unit.new_project_ref(
+ domain_id=self.domain_id,
+ parent_id=projects[1]['project']['id'])
+ resp = self.post('/projects',
+ body={'project': new_ref})
+ self.assertValidProjectResponse(resp, new_ref)
+ projects.append(resp.result)
+
+ # Query for projects[0] subtree_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?subtree_as_ids' % {
+ 'project_id': projects[0]['project']['id']})
+ self.assertValidProjectResponse(r, projects[0]['project'])
+ subtree_as_ids = r.result['project']['subtree']
+
+ # The subtree hierarchy from projects[0] should have the following
+ # structure:
+ # {
+ # projects[1]: {
+ # projects[2]: None,
+ # projects[4]: None
+ # },
+ # projects[3]: None
+ # }
+ expected_dict = {
+ projects[1]['project']['id']: {
+ projects[2]['project']['id']: None,
+ projects[4]['project']['id']: None
+ },
+ projects[3]['project']['id']: None
+ }
+ self.assertDictEqual(expected_dict, subtree_as_ids)
+
+ # Now query for projects[1] subtree_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?subtree_as_ids' % {
+ 'project_id': projects[1]['project']['id']})
+ self.assertValidProjectResponse(r, projects[1]['project'])
+ subtree_as_ids = r.result['project']['subtree']
+
+ # The subtree hierarchy from projects[1] should have the following
+ # structure:
+ # {
+ # projects[2]: None,
+ # projects[4]: None
+ # }
+ expected_dict = {
+ projects[2]['project']['id']: None,
+ projects[4]['project']['id']: None
+ }
+ self.assertDictEqual(expected_dict, subtree_as_ids)
+
+ # Now query for projects[3] subtree_as_ids
+ r = self.get(
+ '/projects/%(project_id)s?subtree_as_ids' % {
+ 'project_id': projects[3]['project']['id']})
+ self.assertValidProjectResponse(r, projects[3]['project'])
+ subtree_as_ids = r.result['project']['subtree']
+
+ # projects[3] has no subtree, subtree_as_ids must be None
+ self.assertIsNone(subtree_as_ids)
+
+ def test_get_project_with_subtree_as_list_with_full_access(self):
+ """``GET /projects/{project_id}?subtree_as_list`` with full access.
+
+ Test plan:
+
+ - Create 'parent', 'project' and 'subproject' projects;
+ - Assign a user a role on each one of those projects;
+ - Check that calling subtree_as_list on 'parent' returns both 'parent'
+ and 'subproject'.
+
+ """
+ # Create the project hierarchy
+ parent, project, subproject = self._create_projects_hierarchy(2)
+
+ # Assign a role for the user on all the created projects
+ for proj in (parent, project, subproject):
+ self.put(self.build_role_assignment_link(
+ role_id=self.role_id, user_id=self.user_id,
+ project_id=proj['project']['id']))
+
+ # Make the API call
+ r = self.get('/projects/%(project_id)s?subtree_as_list' %
+ {'project_id': parent['project']['id']})
+ self.assertValidProjectResponse(r, parent['project'])
+
+ # Assert only 'project' and 'subproject' are in the subtree
+ self.assertIn(project, r.result['project']['subtree'])
+ self.assertIn(subproject, r.result['project']['subtree'])
+ self.assertEqual(2, len(r.result['project']['subtree']))
+
+ def test_get_project_with_subtree_as_list_with_partial_access(self):
+ """``GET /projects/{project_id}?subtree_as_list`` with partial access.
+
+ Test plan:
+
+ - Create 'parent', 'project' and 'subproject' projects;
+ - Assign a user a role on 'parent' and 'subproject';
+ - Check that calling subtree_as_list on 'parent' returns 'subproject'.
+
+ """
+ # Create the project hierarchy
+ parent, project, subproject = self._create_projects_hierarchy(2)
+
+ # Assign a role for the user on parent and subproject
+ for proj in (parent, subproject):
+ self.put(self.build_role_assignment_link(
+ role_id=self.role_id, user_id=self.user_id,
+ project_id=proj['project']['id']))
+
+ # Make the API call
+ r = self.get('/projects/%(project_id)s?subtree_as_list' %
+ {'project_id': parent['project']['id']})
+ self.assertValidProjectResponse(r, parent['project'])
+
+ # Assert only 'subproject' is in the subtree
+ self.assertIn(subproject, r.result['project']['subtree'])
+ self.assertEqual(1, len(r.result['project']['subtree']))
+
+ def test_get_project_with_subtree_as_list_and_subtree_as_ids(self):
+ """Attempt to get a project subtree as both a list and as IDs.
+
+ This uses ``GET /projects/{project_id}?subtree_as_list&subtree_as_ids``
+ which should fail with a bad request due to the conflicting query
+ strings.
+
+ """
+ projects = self._create_projects_hierarchy(hierarchy_size=2)
+
+ self.get(
+ '/projects/%(project_id)s?subtree_as_list&subtree_as_ids' % {
+ 'project_id': projects[1]['project']['id']},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_update_project(self):
+ """Call ``PATCH /projects/{project_id}``."""
+ ref = unit.new_project_ref(domain_id=self.domain_id,
+ parent_id=self.project['parent_id'])
+ del ref['id']
+ r = self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id},
+ body={'project': ref})
+ self.assertValidProjectResponse(r, ref)
+
+ def test_update_project_unsafe(self):
+ """Call ``POST /projects/{project_id} with unsafe names``."""
+ unsafe_name = 'i am not / safe'
+
+ self.config_fixture.config(group='resource',
+ project_name_url_safe='off')
+ ref = unit.new_project_ref(name=unsafe_name,
+ domain_id=self.domain_id,
+ parent_id=self.project['parent_id'])
+ del ref['id']
+ self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id},
+ body={'project': ref})
+
+ unsafe_name = 'i am still not / safe'
+ for config_setting in ['new', 'strict']:
+ self.config_fixture.config(group='resource',
+ project_name_url_safe=config_setting)
+ ref = unit.new_project_ref(name=unsafe_name,
+ domain_id=self.domain_id,
+ parent_id=self.project['parent_id'])
+ del ref['id']
+ self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id},
+ body={'project': ref},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_update_project_unsafe_default(self):
+ """Check default for unsafe names for ``POST /projects``."""
+ unsafe_name = 'i am not / safe'
+
+ # By default, we should be able to create unsafe names
+ ref = unit.new_project_ref(name=unsafe_name,
+ domain_id=self.domain_id,
+ parent_id=self.project['parent_id'])
+ del ref['id']
+ self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id},
+ body={'project': ref})
+
+ def test_update_project_domain_id(self):
+ """Call ``PATCH /projects/{project_id}`` with domain_id."""
+ project = unit.new_project_ref(domain_id=self.domain['id'])
+ project = self.resource_api.create_project(project['id'], project)
+ project['domain_id'] = CONF.identity.default_domain_id
+ r = self.patch('/projects/%(project_id)s' % {
+ 'project_id': project['id']},
+ body={'project': project},
+ expected_status=exception.ValidationError.code)
+ self.config_fixture.config(domain_id_immutable=False)
+ project['domain_id'] = self.domain['id']
+ r = self.patch('/projects/%(project_id)s' % {
+ 'project_id': project['id']},
+ body={'project': project})
+ self.assertValidProjectResponse(r, project)
+
+ def test_update_project_parent_id(self):
+ """Call ``PATCH /projects/{project_id}``."""
+ projects = self._create_projects_hierarchy()
+ leaf_project = projects[1]['project']
+ leaf_project['parent_id'] = None
+ self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': leaf_project['id']},
+ body={'project': leaf_project},
+ expected_status=http_client.FORBIDDEN)
+
+ def test_update_project_is_domain_not_allowed(self):
+ """Call ``PATCH /projects/{project_id}`` with is_domain.
+
+ The is_domain flag is immutable.
+ """
+ project = unit.new_project_ref(domain_id=self.domain['id'])
+ resp = self.post('/projects',
+ body={'project': project})
+ self.assertFalse(resp.result['project']['is_domain'])
+
+ project['parent_id'] = resp.result['project']['parent_id']
+ project['is_domain'] = True
+ self.patch('/projects/%(project_id)s' % {
+ 'project_id': resp.result['project']['id']},
+ body={'project': project},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_disable_leaf_project(self):
+ """Call ``PATCH /projects/{project_id}``."""
+ projects = self._create_projects_hierarchy()
+ leaf_project = projects[1]['project']
+ leaf_project['enabled'] = False
+ r = self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': leaf_project['id']},
+ body={'project': leaf_project})
+ self.assertEqual(
+ leaf_project['enabled'], r.result['project']['enabled'])
+
+ def test_disable_not_leaf_project(self):
+ """Call ``PATCH /projects/{project_id}``."""
+ projects = self._create_projects_hierarchy()
+ root_project = projects[0]['project']
+ root_project['enabled'] = False
+ self.patch(
+ '/projects/%(project_id)s' % {
+ 'project_id': root_project['id']},
+ body={'project': root_project},
+ expected_status=http_client.FORBIDDEN)
+
+ def test_delete_project(self):
+ """Call ``DELETE /projects/{project_id}``
+
+ As well as making sure the delete succeeds, we ensure
+ that any credentials that reference this projects are
+ also deleted, while other credentials are unaffected.
+
+ """
+ credential = unit.new_credential_ref(user_id=self.user['id'],
+ project_id=self.project_id)
+ self.credential_api.create_credential(credential['id'], credential)
+
+ # First check the credential for this project is present
+ r = self.credential_api.get_credential(credential['id'])
+ self.assertDictEqual(credential, r)
+ # Create a second credential with a different project
+ project2 = unit.new_project_ref(domain_id=self.domain['id'])
+ self.resource_api.create_project(project2['id'], project2)
+ credential2 = unit.new_credential_ref(user_id=self.user['id'],
+ project_id=project2['id'])
+ self.credential_api.create_credential(credential2['id'], credential2)
+
+ # Now delete the project
+ self.delete(
+ '/projects/%(project_id)s' % {
+ 'project_id': self.project_id})
+
+ # Deleting the project should have deleted any credentials
+ # that reference this project
+ self.assertRaises(exception.CredentialNotFound,
+ self.credential_api.get_credential,
+ credential_id=credential['id'])
+ # But the credential for project2 is unaffected
+ r = self.credential_api.get_credential(credential2['id'])
+ self.assertDictEqual(credential2, r)
+
+ def test_delete_not_leaf_project(self):
+ """Call ``DELETE /projects/{project_id}``."""
+ projects = self._create_projects_hierarchy()
+ self.delete(
+ '/projects/%(project_id)s' % {
+ 'project_id': projects[0]['project']['id']},
+ expected_status=http_client.FORBIDDEN)
+
+
+class ResourceV3toV2MethodsTestCase(unit.TestCase):
+ """Test domain V3 to V2 conversion methods."""
+
+ def _setup_initial_projects(self):
+ self.project_id = uuid.uuid4().hex
+ self.domain_id = CONF.identity.default_domain_id
+ self.parent_id = uuid.uuid4().hex
+ # Project with only domain_id in ref
+ self.project1 = unit.new_project_ref(id=self.project_id,
+ name=self.project_id,
+ domain_id=self.domain_id)
+ # Project with both domain_id and parent_id in ref
+ self.project2 = unit.new_project_ref(id=self.project_id,
+ name=self.project_id,
+ domain_id=self.domain_id,
+ parent_id=self.parent_id)
+ # Project with no domain_id and parent_id in ref
+ self.project3 = unit.new_project_ref(id=self.project_id,
+ name=self.project_id,
+ domain_id=self.domain_id,
+ parent_id=self.parent_id)
+ # Expected result with no domain_id and parent_id
+ self.expected_project = {'id': self.project_id,
+ 'name': self.project_id}
+
+ def test_v2controller_filter_domain_id(self):
+ # V2.0 is not domain aware, ensure domain_id is popped off the ref.
+ other_data = uuid.uuid4().hex
+ domain_id = CONF.identity.default_domain_id
+ ref = {'domain_id': domain_id,
+ 'other_data': other_data}
+
+ ref_no_domain = {'other_data': other_data}
+ expected_ref = ref_no_domain.copy()
+
+ updated_ref = controller.V2Controller.filter_domain_id(ref)
+ self.assertIs(ref, updated_ref)
+ self.assertDictEqual(expected_ref, ref)
+ # Make sure we don't error/muck up data if domain_id isn't present
+ updated_ref = controller.V2Controller.filter_domain_id(ref_no_domain)
+ self.assertIs(ref_no_domain, updated_ref)
+ self.assertDictEqual(expected_ref, ref_no_domain)
+
+ def test_v3controller_filter_domain_id(self):
+ # No data should be filtered out in this case.
+ other_data = uuid.uuid4().hex
+ domain_id = uuid.uuid4().hex
+ ref = {'domain_id': domain_id,
+ 'other_data': other_data}
+
+ expected_ref = ref.copy()
+ updated_ref = controller.V3Controller.filter_domain_id(ref)
+ self.assertIs(ref, updated_ref)
+ self.assertDictEqual(expected_ref, ref)
+
+ def test_v2controller_filter_domain(self):
+ other_data = uuid.uuid4().hex
+ domain_id = uuid.uuid4().hex
+ non_default_domain_ref = {'domain': {'id': domain_id},
+ 'other_data': other_data}
+ default_domain_ref = {'domain': {'id': 'default'},
+ 'other_data': other_data}
+ updated_ref = controller.V2Controller.filter_domain(default_domain_ref)
+ self.assertNotIn('domain', updated_ref)
+ self.assertNotIn(
+ 'domain',
+ controller.V2Controller.filter_domain(non_default_domain_ref))
+
+ def test_v2controller_filter_project_parent_id(self):
+ # V2.0 is not project hierarchy aware, ensure parent_id is popped off.
+ other_data = uuid.uuid4().hex
+ parent_id = uuid.uuid4().hex
+ ref = {'parent_id': parent_id,
+ 'other_data': other_data}
+
+ ref_no_parent = {'other_data': other_data}
+ expected_ref = ref_no_parent.copy()
+
+ updated_ref = controller.V2Controller.filter_project_parent_id(ref)
+ self.assertIs(ref, updated_ref)
+ self.assertDictEqual(expected_ref, ref)
+ # Make sure we don't error/muck up data if parent_id isn't present
+ updated_ref = controller.V2Controller.filter_project_parent_id(
+ ref_no_parent)
+ self.assertIs(ref_no_parent, updated_ref)
+ self.assertDictEqual(expected_ref, ref_no_parent)
+
+ def test_v3_to_v2_project_method(self):
+ self._setup_initial_projects()
+
+ # TODO(shaleh): these optional fields are not handled well by the
+ # v3_to_v2 code. Manually remove them for now. Eventually update
+ # new_project_ref to not return optional values
+ del self.project1['enabled']
+ del self.project1['description']
+ del self.project2['enabled']
+ del self.project2['description']
+ del self.project3['enabled']
+ del self.project3['description']
+
+ updated_project1 = controller.V2Controller.v3_to_v2_project(
+ self.project1)
+ self.assertIs(self.project1, updated_project1)
+ self.assertDictEqual(self.expected_project, self.project1)
+ updated_project2 = controller.V2Controller.v3_to_v2_project(
+ self.project2)
+ self.assertIs(self.project2, updated_project2)
+ self.assertDictEqual(self.expected_project, self.project2)
+ updated_project3 = controller.V2Controller.v3_to_v2_project(
+ self.project3)
+ self.assertIs(self.project3, updated_project3)
+ self.assertDictEqual(self.expected_project, self.project2)
+
+ def test_v3_to_v2_project_method_list(self):
+ self._setup_initial_projects()
+ project_list = [self.project1, self.project2, self.project3]
+
+ # TODO(shaleh): these optional fields are not handled well by the
+ # v3_to_v2 code. Manually remove them for now. Eventually update
+ # new_project_ref to not return optional values
+ for p in project_list:
+ del p['enabled']
+ del p['description']
+ updated_list = controller.V2Controller.v3_to_v2_project(project_list)
+
+ self.assertEqual(len(updated_list), len(project_list))
+
+ for i, ref in enumerate(updated_list):
+ # Order should not change.
+ self.assertIs(ref, project_list[i])
+
+ self.assertDictEqual(self.expected_project, self.project1)
+ self.assertDictEqual(self.expected_project, self.project2)
+ self.assertDictEqual(self.expected_project, self.project3)
diff --git a/keystone-moon/keystone/tests/unit/test_v3_trust.py b/keystone-moon/keystone/tests/unit/test_v3_trust.py
new file mode 100644
index 00000000..d3127c89
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/test_v3_trust.py
@@ -0,0 +1,403 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import uuid
+
+from six.moves import http_client
+
+from keystone.tests import unit
+from keystone.tests.unit import test_v3
+
+
+class TestTrustOperations(test_v3.RestfulTestCase):
+ """Test module for create, read, update and delete operations on trusts.
+
+ This module is specific to tests for trust CRUD operations. All other tests
+ related to trusts that are authentication or authorization specific should
+ live in in the keystone/tests/unit/test_v3_auth.py module.
+
+ """
+
+ def setUp(self):
+ super(TestTrustOperations, self).setUp()
+ # create a trustee to delegate stuff to
+ self.trustee_user = unit.create_user(self.identity_api,
+ domain_id=self.domain_id)
+ self.trustee_user_id = self.trustee_user['id']
+
+ def test_create_trust_bad_request(self):
+ # The server returns a 403 Forbidden rather than a 400 Bad Request, see
+ # bug 1133435
+ self.post('/OS-TRUST/trusts', body={'trust': {}},
+ expected_status=http_client.FORBIDDEN)
+
+ def test_trust_crud(self):
+ # create a new trust
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r, ref)
+
+ # get the trust
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
+ self.assertValidTrustResponse(r, ref)
+
+ # validate roles on the trust
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s/roles' % {
+ 'trust_id': trust['id']})
+ roles = self.assertValidRoleListResponse(r, self.role)
+ self.assertIn(self.role['id'], [x['id'] for x in roles])
+ self.head(
+ '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
+ 'trust_id': trust['id'],
+ 'role_id': self.role['id']},
+ expected_status=http_client.OK)
+ r = self.get(
+ '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % {
+ 'trust_id': trust['id'],
+ 'role_id': self.role['id']})
+ self.assertValidRoleResponse(r, self.role)
+
+ # list all trusts
+ r = self.get('/OS-TRUST/trusts')
+ self.assertValidTrustListResponse(r, trust)
+
+ # trusts are immutable
+ self.patch(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ body={'trust': ref},
+ expected_status=http_client.NOT_FOUND)
+
+ # delete the trust
+ self.delete(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']})
+
+ # ensure the trust is not found
+ self.get(
+ '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']},
+ expected_status=http_client.NOT_FOUND)
+
+ def test_list_trusts(self):
+ # create three trusts with the same trustor and trustee
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ for i in range(3):
+ ref['expires_at'] = datetime.datetime.utcnow().replace(
+ year=2032).strftime(unit.TIME_FORMAT)
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ self.assertValidTrustResponse(r, ref)
+
+ # list all trusts
+ r = self.get('/OS-TRUST/trusts')
+ trusts = r.result['trusts']
+ self.assertEqual(3, len(trusts))
+ self.assertValidTrustListResponse(r)
+
+ # list all trusts for the trustor
+ r = self.get('/OS-TRUST/trusts?trustor_user_id=%s' %
+ self.user_id)
+ trusts = r.result['trusts']
+ self.assertEqual(3, len(trusts))
+ self.assertValidTrustListResponse(r)
+
+        # list all trusts where the trustor is also the trustee - expect none
+ r = self.get('/OS-TRUST/trusts?trustee_user_id=%s' %
+ self.user_id)
+ trusts = r.result['trusts']
+ self.assertEqual(0, len(trusts))
+
+        # listing all trusts for a different trustee is forbidden
+ r = self.get('/OS-TRUST/trusts?trustee_user_id=%s' %
+ self.trustee_user_id,
+ expected_status=http_client.FORBIDDEN)
+
+ def test_delete_trust(self):
+ # create a trust
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r, ref)
+
+ # delete the trust
+ self.delete('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']})
+
+ # ensure the trust isn't found
+ self.get('/OS-TRUST/trusts/%(trust_id)s' % {
+ 'trust_id': trust['id']},
+ expected_status=http_client.NOT_FOUND)
+
+ def test_create_trust_without_trustee_returns_bad_request(self):
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+
+ # trustee_user_id is required to create a trust
+ del ref['trustee_user_id']
+
+ self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_create_trust_without_impersonation_returns_bad_request(self):
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+
+ # impersonation is required to create a trust
+ del ref['impersonation']
+
+ self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_create_trust_with_bad_remaining_uses_returns_bad_request(self):
+        # negative numbers, strings, non-integers, and 0 are not valid values
+ for value in [-1, 0, "a bad value", 7.2]:
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ remaining_uses=value,
+ role_ids=[self.role_id])
+ self.post('/OS-TRUST/trusts',
+ body={'trust': ref},
+ expected_status=http_client.BAD_REQUEST)
+
+ def test_create_trust_with_non_existant_trustee_returns_not_found(self):
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=uuid.uuid4().hex,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+ self.post('/OS-TRUST/trusts', body={'trust': ref},
+ expected_status=http_client.NOT_FOUND)
+
+ def test_create_trust_with_trustee_as_trustor_returns_forbidden(self):
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.trustee_user_id,
+ trustee_user_id=self.user_id,
+ project_id=self.project_id,
+ role_ids=[self.role_id])
+ # NOTE(lbragstad): This fails because the user making the request isn't
+ # the trustor defined in the request.
+ self.post('/OS-TRUST/trusts', body={'trust': ref},
+ expected_status=http_client.FORBIDDEN)
+
+ def test_create_trust_with_non_existant_project_returns_not_found(self):
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=uuid.uuid4().hex,
+ role_ids=[self.role_id])
+ self.post('/OS-TRUST/trusts', body={'trust': ref},
+ expected_status=http_client.NOT_FOUND)
+
+ def test_create_trust_with_non_existant_role_id_returns_not_found(self):
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_ids=[uuid.uuid4().hex])
+ self.post('/OS-TRUST/trusts', body={'trust': ref},
+ expected_status=http_client.NOT_FOUND)
+
+ def test_create_trust_with_non_existant_role_name_returns_not_found(self):
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ role_names=[uuid.uuid4().hex])
+ self.post('/OS-TRUST/trusts', body={'trust': ref},
+ expected_status=http_client.NOT_FOUND)
+
+ def test_validate_trust_scoped_token_against_v2_returns_unauthorized(self):
+ # create a new trust
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.default_domain_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(r)
+
+ # get a v3 trust-scoped token as the trustee
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_create_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r, self.default_domain_user)
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token=self.get_admin_token(),
+ method='GET', expected_status=http_client.UNAUTHORIZED)
+
+ def test_v3_v2_intermix_trustor_not_in_default_domain_failed(self):
+ # get a project-scoped token
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project_id)
+ token = self.get_requested_token(auth_data)
+
+ # create a new trust
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.default_domain_user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.default_domain_project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
+ trust = self.assertValidTrustResponse(r)
+
+ # get a trust-scoped token as the trustee
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_create_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(
+ r, self.trustee_user)
+ token = r.headers.get('X-Subject-Token')
+
+ # now validate the v3 token with v2 API
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token=self.get_admin_token(),
+ method='GET', expected_status=http_client.UNAUTHORIZED)
+
+ def test_v3_v2_intermix_project_not_in_default_domain_failed(self):
+ # create a trustee in default domain to delegate stuff to
+ trustee_user = unit.create_user(self.identity_api,
+ domain_id=test_v3.DEFAULT_DOMAIN_ID)
+ trustee_user_id = trustee_user['id']
+
+ # create a new trust
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.default_domain_user_id,
+ trustee_user_id=trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+
+ # get a project-scoped token as the default_domain_user
+ auth_data = self.build_authentication_request(
+ user_id=self.default_domain_user['id'],
+ password=self.default_domain_user['password'],
+ project_id=self.default_domain_project_id)
+ token = self.get_requested_token(auth_data)
+
+ r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token)
+ trust = self.assertValidTrustResponse(r)
+
+ # get a trust-scoped token as the trustee
+ auth_data = self.build_authentication_request(
+ user_id=trustee_user['id'],
+ password=trustee_user['password'],
+ trust_id=trust['id'])
+ r = self.v3_create_token(auth_data)
+ self.assertValidProjectScopedTokenResponse(r, trustee_user)
+ token = r.headers.get('X-Subject-Token')
+
+ # ensure the token is invalid against v2
+ path = '/v2.0/tokens/%s' % (token)
+ self.admin_request(
+ path=path, token=self.get_admin_token(),
+ method='GET', expected_status=http_client.UNAUTHORIZED)
+
+ def test_exercise_trust_scoped_token_without_impersonation(self):
+ # create a new trust
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=False,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ resp = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(resp)
+
+ # get a trust-scoped token as the trustee
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ resp = self.v3_create_token(auth_data)
+ resp_body = resp.json_body['token']
+
+ self.assertValidProjectScopedTokenResponse(resp,
+ self.trustee_user)
+ self.assertEqual(self.trustee_user['id'], resp_body['user']['id'])
+ self.assertEqual(self.trustee_user['name'], resp_body['user']['name'])
+ self.assertEqual(self.domain['id'], resp_body['user']['domain']['id'])
+ self.assertEqual(self.domain['name'],
+ resp_body['user']['domain']['name'])
+ self.assertEqual(self.project['id'], resp_body['project']['id'])
+ self.assertEqual(self.project['name'], resp_body['project']['name'])
+
+ def test_exercise_trust_scoped_token_with_impersonation(self):
+ # create a new trust
+ ref = unit.new_trust_ref(
+ trustor_user_id=self.user_id,
+ trustee_user_id=self.trustee_user_id,
+ project_id=self.project_id,
+ impersonation=True,
+ expires=dict(minutes=1),
+ role_ids=[self.role_id])
+ resp = self.post('/OS-TRUST/trusts', body={'trust': ref})
+ trust = self.assertValidTrustResponse(resp)
+
+ # get a trust-scoped token as the trustee
+ auth_data = self.build_authentication_request(
+ user_id=self.trustee_user['id'],
+ password=self.trustee_user['password'],
+ trust_id=trust['id'])
+ resp = self.v3_create_token(auth_data)
+ resp_body = resp.json_body['token']
+
+ self.assertValidProjectScopedTokenResponse(resp, self.user)
+ self.assertEqual(self.user['id'], resp_body['user']['id'])
+ self.assertEqual(self.user['name'], resp_body['user']['name'])
+ self.assertEqual(self.domain['id'], resp_body['user']['domain']['id'])
+ self.assertEqual(self.domain['name'],
+ resp_body['user']['domain']['name'])
+ self.assertEqual(self.project['id'], resp_body['project']['id'])
+ self.assertEqual(self.project['name'], resp_body['project']['name'])
diff --git a/keystone-moon/keystone/tests/unit/test_validation.py b/keystone-moon/keystone/tests/unit/test_validation.py
index f7a224a0..73cb6ef6 100644
--- a/keystone-moon/keystone/tests/unit/test_validation.py
+++ b/keystone-moon/keystone/tests/unit/test_validation.py
@@ -21,11 +21,11 @@ from keystone.catalog import schema as catalog_schema
from keystone.common import validation
from keystone.common.validation import parameter_types
from keystone.common.validation import validators
-from keystone.contrib.endpoint_filter import schema as endpoint_filter_schema
-from keystone.contrib.federation import schema as federation_schema
from keystone.credential import schema as credential_schema
from keystone import exception
+from keystone.federation import schema as federation_schema
from keystone.identity import schema as identity_schema
+from keystone.oauth1 import schema as oauth1_schema
from keystone.policy import schema as policy_schema
from keystone.resource import schema as resource_schema
from keystone.tests import unit
@@ -67,6 +67,12 @@ entity_create = {
'additionalProperties': True,
}
+entity_create_optional_body = {
+ 'type': 'object',
+ 'properties': _entity_properties,
+ 'additionalProperties': True,
+}
+
entity_update = {
'type': 'object',
'properties': _entity_properties,
@@ -78,6 +84,8 @@ _VALID_ENABLED_FORMATS = [True, False]
_INVALID_ENABLED_FORMATS = ['some string', 1, 0, 'True', 'False']
+_INVALID_DESC_FORMATS = [False, 1, 2.0]
+
_VALID_URLS = ['https://example.com', 'http://EXAMPLE.com/v3',
'http://localhost', 'http://127.0.0.1:5000',
'http://1.1.1.1', 'http://255.255.255.255',
@@ -90,7 +98,7 @@ _VALID_URLS = ['https://example.com', 'http://EXAMPLE.com/v3',
_INVALID_URLS = [False, 'this is not a URL', 1234, 'www.example.com',
'localhost', 'http//something.com',
- 'https//something.com']
+ 'https//something.com', ' http://example.com']
_VALID_FILTERS = [{'interface': 'admin'},
{'region': 'US-WEST',
@@ -99,6 +107,17 @@ _VALID_FILTERS = [{'interface': 'admin'},
_INVALID_FILTERS = ['some string', 1, 0, True, False]
+def expected_validation_failure(msg):
+ def wrapper(f):
+ def wrapped(self, *args, **kwargs):
+ args = (self,) + args
+ e = self.assertRaises(exception.ValidationError, f,
+ *args, **kwargs)
+ self.assertIn(msg, six.text_type(e))
+ return wrapped
+ return wrapper
+
+
class ValidatedDecoratorTests(unit.BaseTestCase):
entity_schema = {
@@ -113,42 +132,51 @@ class ValidatedDecoratorTests(unit.BaseTestCase):
'name': uuid.uuid4().hex,
}
- invalid_entity = {}
-
- @validation.validated(entity_schema, 'entity')
- def do_something(self, entity):
- pass
+ invalid_entity = {
+ 'name': 1.0, # NOTE(dstanek): this is the incorrect type for name
+ }
@validation.validated(entity_create, 'entity')
def create_entity(self, entity):
- pass
+ """Used to test cases where validated param is the only param."""
+
+ @validation.validated(entity_create_optional_body, 'entity')
+ def create_entity_optional_body(self, entity):
+ """Used to test cases where there is an optional body."""
@validation.validated(entity_update, 'entity')
def update_entity(self, entity_id, entity):
- pass
+ """Used to test cases where validated param is not the only param."""
- def _assert_call_entity_method_fails(self, method, *args, **kwargs):
- e = self.assertRaises(exception.ValidationError, method,
- *args, **kwargs)
+ def test_calling_create_with_valid_entity_kwarg_succeeds(self):
+ self.create_entity(entity=self.valid_entity)
- self.assertIn('Expecting to find entity in request body',
- six.text_type(e))
+ def test_calling_create_with_empty_entity_kwarg_succeeds(self):
+ """Test the case when the client passes in an empty kwarg reference."""
+ self.create_entity_optional_body(entity={})
- def test_calling_with_valid_entity_kwarg_succeeds(self):
- self.do_something(entity=self.valid_entity)
+ @expected_validation_failure('Expecting to find entity in request body')
+ def test_calling_create_with_kwarg_as_None_fails(self):
+ self.create_entity(entity=None)
- def test_calling_with_invalid_entity_kwarg_fails(self):
- self.assertRaises(exception.ValidationError,
- self.do_something,
- entity=self.invalid_entity)
+ def test_calling_create_with_valid_entity_arg_succeeds(self):
+ self.create_entity(self.valid_entity)
- def test_calling_with_valid_entity_arg_succeeds(self):
- self.do_something(self.valid_entity)
+ def test_calling_create_with_empty_entity_arg_succeeds(self):
+ """Test the case when the client passes in an empty entity reference."""
+ self.create_entity_optional_body({})
- def test_calling_with_invalid_entity_arg_fails(self):
- self.assertRaises(exception.ValidationError,
- self.do_something,
- self.invalid_entity)
+ @expected_validation_failure("Invalid input for field 'name'")
+ def test_calling_create_with_invalid_entity_fails(self):
+ self.create_entity(self.invalid_entity)
+
+ @expected_validation_failure('Expecting to find entity in request body')
+ def test_calling_create_with_entity_arg_as_None_fails(self):
+ self.create_entity(None)
+
+ @expected_validation_failure('Expecting to find entity in request body')
+ def test_calling_create_without_an_entity_fails(self):
+ self.create_entity()
def test_using_the_wrong_name_with_the_decorator_fails(self):
with testtools.ExpectedException(TypeError):
@@ -156,24 +184,26 @@ class ValidatedDecoratorTests(unit.BaseTestCase):
def function(entity):
pass
- def test_create_entity_no_request_body_with_decorator(self):
- """Test the case when request body is not provided."""
- self._assert_call_entity_method_fails(self.create_entity)
+ # NOTE(dstanek): below are the test cases for making sure the validation
+ # works when the validated param is not the only param. Since all of the
+ # actual validation cases are tested above these test are for a sanity
+ # check.
- def test_create_entity_empty_request_body_with_decorator(self):
- """Test the case when client passing in an empty entity reference."""
- self._assert_call_entity_method_fails(self.create_entity, entity={})
+ def test_calling_update_with_valid_entity_succeeds(self):
+ self.update_entity(uuid.uuid4().hex, self.valid_entity)
- def test_update_entity_no_request_body_with_decorator(self):
- """Test the case when request body is not provided."""
- self._assert_call_entity_method_fails(self.update_entity,
- uuid.uuid4().hex)
+ @expected_validation_failure("Invalid input for field 'name'")
+ def test_calling_update_with_invalid_entity_fails(self):
+ self.update_entity(uuid.uuid4().hex, self.invalid_entity)
- def test_update_entity_empty_request_body_with_decorator(self):
+ def test_calling_update_with_empty_entity_kwarg_succeeds(self):
"""Test the case when client passing in an empty entity reference."""
- self._assert_call_entity_method_fails(self.update_entity,
- uuid.uuid4().hex,
- entity={})
+ global entity_update
+ original_entity_update = entity_update.copy()
+ # pop 'minProperties' from schema so that empty body is allowed.
+ entity_update.pop('minProperties')
+ self.update_entity(uuid.uuid4().hex, entity={})
+ entity_update = original_entity_update
class EntityValidationTestCase(unit.BaseTestCase):
@@ -499,11 +529,22 @@ class ProjectValidationTestCase(unit.BaseTestCase):
self.update_project_validator.validate,
request_to_validate)
- def test_validate_project_update_request_with_null_domain_id_fails(self):
- request_to_validate = {'domain_id': None}
- self.assertRaises(exception.SchemaValidationError,
- self.update_project_validator.validate,
- request_to_validate)
+ def test_validate_project_create_request_with_valid_domain_id(self):
+ """Test that we validate `domain_id` in create project requests."""
+ # domain_id is nullable
+ for domain_id in [None, uuid.uuid4().hex]:
+ request_to_validate = {'name': self.project_name,
+ 'domain_id': domain_id}
+ self.create_project_validator.validate(request_to_validate)
+
+ def test_validate_project_request_with_invalid_domain_id_fails(self):
+ """Exception is raised when `domain_id` is a non-id value."""
+ for domain_id in [False, 'fake_project']:
+ request_to_validate = {'name': self.project_name,
+ 'domain_id': domain_id}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_project_validator.validate,
+ request_to_validate)
class DomainValidationTestCase(unit.BaseTestCase):
@@ -897,6 +938,11 @@ class RegionValidationTestCase(unit.BaseTestCase):
request_to_validate = {'other_attr': uuid.uuid4().hex}
self.create_region_validator.validate(request_to_validate)
+ def test_validate_region_create_succeeds_with_no_parameters(self):
+ """Validate create region request with no parameters."""
+ request_to_validate = {}
+ self.create_region_validator.validate(request_to_validate)
+
def test_validate_region_update_succeeds(self):
"""Test that we validate a region update request."""
request_to_validate = {'id': 'us-west',
@@ -1298,8 +1344,8 @@ class EndpointGroupValidationTestCase(unit.BaseTestCase):
def setUp(self):
super(EndpointGroupValidationTestCase, self).setUp()
- create = endpoint_filter_schema.endpoint_group_create
- update = endpoint_filter_schema.endpoint_group_update
+ create = catalog_schema.endpoint_group_create
+ update = catalog_schema.endpoint_group_update
self.create_endpoint_grp_validator = validators.SchemaValidator(create)
self.update_endpoint_grp_validator = validators.SchemaValidator(update)
@@ -1321,8 +1367,7 @@ class EndpointGroupValidationTestCase(unit.BaseTestCase):
self.create_endpoint_grp_validator.validate(request_to_validate)
def test_validate_endpoint_group_create_succeeds_with_valid_filters(self):
- """Validate dict values as `filters` in endpoint group create requests.
- """
+ """Validate `filters` in endpoint group create requests."""
request_to_validate = {'description': 'endpoint group description',
'name': 'endpoint_group_name'}
for valid_filters in _VALID_FILTERS:
@@ -1718,13 +1763,8 @@ class UserValidationTestCase(unit.BaseTestCase):
def test_validate_user_create_with_all_valid_parameters_succeeds(self):
"""Test that validating a user create request succeeds."""
- request_to_validate = {'name': self.user_name,
- 'default_project_id': uuid.uuid4().hex,
- 'domain_id': uuid.uuid4().hex,
- 'description': uuid.uuid4().hex,
- 'enabled': True,
- 'email': uuid.uuid4().hex,
- 'password': uuid.uuid4().hex}
+ request_to_validate = unit.new_user_ref(domain_id=uuid.uuid4().hex,
+ name=self.user_name)
self.create_user_validator.validate(request_to_validate)
def test_validate_user_create_fails_without_name(self):
@@ -1875,3 +1915,201 @@ class GroupValidationTestCase(unit.BaseTestCase):
"""Validate group update requests with extra parameters."""
request_to_validate = {'other_attr': uuid.uuid4().hex}
self.update_group_validator.validate(request_to_validate)
+
+
+class IdentityProviderValidationTestCase(unit.BaseTestCase):
+ """Test for V3 Identity Provider API validation."""
+
+ def setUp(self):
+ super(IdentityProviderValidationTestCase, self).setUp()
+
+ create = federation_schema.identity_provider_create
+ update = federation_schema.identity_provider_update
+ self.create_idp_validator = validators.SchemaValidator(create)
+ self.update_idp_validator = validators.SchemaValidator(update)
+
+ def test_validate_idp_request_succeeds(self):
+ """Test that we validate an identity provider request."""
+ request_to_validate = {'description': 'identity provider description',
+ 'enabled': True,
+ 'remote_ids': [uuid.uuid4().hex,
+ uuid.uuid4().hex]}
+ self.create_idp_validator.validate(request_to_validate)
+ self.update_idp_validator.validate(request_to_validate)
+
+ def test_validate_idp_request_fails_with_invalid_params(self):
+ """Exception raised when unknown parameter is found."""
+ request_to_validate = {'bogus': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_idp_validator.validate,
+ request_to_validate)
+
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_idp_validator.validate,
+ request_to_validate)
+
+ def test_validate_idp_request_with_enabled(self):
+ """Validate `enabled` as boolean-like values."""
+ for valid_enabled in _VALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': valid_enabled}
+ self.create_idp_validator.validate(request_to_validate)
+ self.update_idp_validator.validate(request_to_validate)
+
+ def test_validate_idp_request_with_invalid_enabled_fails(self):
+ """Exception is raised when `enabled` isn't a boolean-like value."""
+ for invalid_enabled in _INVALID_ENABLED_FORMATS:
+ request_to_validate = {'enabled': invalid_enabled}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_idp_validator.validate,
+ request_to_validate)
+
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_idp_validator.validate,
+ request_to_validate)
+
+ def test_validate_idp_request_no_parameters(self):
+ """Test schema validation with an empty request body."""
+ request_to_validate = {}
+ self.create_idp_validator.validate(request_to_validate)
+
+ # Exception raised when no property on IdP update.
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_idp_validator.validate,
+ request_to_validate)
+
+ def test_validate_idp_request_with_invalid_description_fails(self):
+ """Exception is raised when `description` is a non-string value."""
+ request_to_validate = {'description': False}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_idp_validator.validate,
+ request_to_validate)
+
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_idp_validator.validate,
+ request_to_validate)
+
+ def test_validate_idp_request_with_invalid_remote_id_fails(self):
+ """Exception is raised when `remote_ids` is not an array."""
+ request_to_validate = {"remote_ids": uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_idp_validator.validate,
+ request_to_validate)
+
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_idp_validator.validate,
+ request_to_validate)
+
+ def test_validate_idp_request_with_duplicated_remote_id(self):
+ """Exception is raised when duplicated `remote_ids` are found."""
+ idp_id = uuid.uuid4().hex
+ request_to_validate = {"remote_ids": [idp_id, idp_id]}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_idp_validator.validate,
+ request_to_validate)
+
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_idp_validator.validate,
+ request_to_validate)
+
+ def test_validate_idp_request_remote_id_nullable(self):
+ """Test that `remote_ids` can be explicitly set to None."""
+ request_to_validate = {'remote_ids': None}
+ self.create_idp_validator.validate(request_to_validate)
+ self.update_idp_validator.validate(request_to_validate)
+
+
+class FederationProtocolValidationTestCase(unit.BaseTestCase):
+ """Test for V3 Federation Protocol API validation."""
+
+ def setUp(self):
+ super(FederationProtocolValidationTestCase, self).setUp()
+
+ schema = federation_schema.federation_protocol_schema
+ # create protocol and update protocol have the same schema
+ # definition, so there is no need to validate them separately.
+ self.protocol_validator = validators.SchemaValidator(schema)
+
+ def test_validate_protocol_request_succeeds(self):
+ """Test that we validate a protocol request successfully."""
+ request_to_validate = {'mapping_id': uuid.uuid4().hex}
+ self.protocol_validator.validate(request_to_validate)
+
+ def test_validate_protocol_request_succeeds_with_nonuuid_mapping_id(self):
+ """Test that we allow underscore in mapping_id value."""
+ request_to_validate = {'mapping_id': 'my_mapping_id'}
+ self.protocol_validator.validate(request_to_validate)
+
+ def test_validate_protocol_request_fails_with_invalid_params(self):
+ """Exception raised when unknown parameter is found."""
+ request_to_validate = {'bogus': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.protocol_validator.validate,
+ request_to_validate)
+
+ def test_validate_protocol_request_no_parameters(self):
+ """Test schema validation with an empty request body."""
+ request_to_validate = {}
+ # 'mapping_id' is required.
+ self.assertRaises(exception.SchemaValidationError,
+ self.protocol_validator.validate,
+ request_to_validate)
+
+ def test_validate_protocol_request_fails_with_invalid_mapping_id(self):
+ """Exception raised when mapping_id is not a string."""
+ request_to_validate = {'mapping_id': 12334}
+ self.assertRaises(exception.SchemaValidationError,
+ self.protocol_validator.validate,
+ request_to_validate)
+
+
+class OAuth1ValidationTestCase(unit.BaseTestCase):
+ """Test for V3 Identity OAuth1 API validation."""
+
+ def setUp(self):
+ super(OAuth1ValidationTestCase, self).setUp()
+
+ create = oauth1_schema.consumer_create
+ update = oauth1_schema.consumer_update
+ self.create_consumer_validator = validators.SchemaValidator(create)
+ self.update_consumer_validator = validators.SchemaValidator(update)
+
+ def test_validate_consumer_request_succeeds(self):
+ """Test that we validate a consumer request successfully."""
+ request_to_validate = {'description': uuid.uuid4().hex,
+ 'name': uuid.uuid4().hex}
+ self.create_consumer_validator.validate(request_to_validate)
+ self.update_consumer_validator.validate(request_to_validate)
+
+ def test_validate_consumer_request_with_no_parameters(self):
+ """Test schema validation with an empty request body."""
+ request_to_validate = {}
+ self.create_consumer_validator.validate(request_to_validate)
+ # At least one property should be given.
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_consumer_validator.validate,
+ request_to_validate)
+
+ def test_validate_consumer_request_with_invalid_description_fails(self):
+ """Exception is raised when `description` is a non-string value."""
+ for invalid_desc in _INVALID_DESC_FORMATS:
+ request_to_validate = {'description': invalid_desc}
+ self.assertRaises(exception.SchemaValidationError,
+ self.create_consumer_validator.validate,
+ request_to_validate)
+
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_consumer_validator.validate,
+ request_to_validate)
+
+ def test_validate_update_consumer_request_fails_with_secret(self):
+ """Exception raised when secret is given."""
+ request_to_validate = {'secret': uuid.uuid4().hex}
+ self.assertRaises(exception.SchemaValidationError,
+ self.update_consumer_validator.validate,
+ request_to_validate)
+
+ def test_validate_consumer_request_with_none_desc(self):
+ """Test schema validation with a None description."""
+ request_to_validate = {'description': None}
+ self.create_consumer_validator.validate(request_to_validate)
+ self.update_consumer_validator.validate(request_to_validate)
diff --git a/keystone-moon/keystone/tests/unit/test_versions.py b/keystone-moon/keystone/tests/unit/test_versions.py
index 40814588..2f5c2b17 100644
--- a/keystone-moon/keystone/tests/unit/test_versions.py
+++ b/keystone-moon/keystone/tests/unit/test_versions.py
@@ -25,9 +25,9 @@ from testtools import matchers as tt_matchers
import webob
from keystone.common import json_home
-from keystone import controllers
from keystone.tests import unit
from keystone.tests.unit import utils
+from keystone.version import controllers
CONF = cfg.CONF
@@ -74,9 +74,9 @@ v3_MEDIA_TYPES = [
]
v3_EXPECTED_RESPONSE = {
- "id": "v3.4",
+ "id": "v3.6",
"status": "stable",
- "updated": "2015-03-30T00:00:00Z",
+ "updated": "2016-04-04T00:00:00Z",
"links": [
{
"rel": "self",
@@ -131,6 +131,10 @@ _build_ep_filter_rel = functools.partial(
json_home.build_v3_extension_resource_relation,
extension_name='OS-EP-FILTER', extension_version='1.0')
+_build_os_inherit_rel = functools.partial(
+ json_home.build_v3_extension_resource_relation,
+ extension_name='OS-INHERIT', extension_version='1.0')
+
TRUST_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation(
'OS-TRUST', '1.0', 'trust_id')
@@ -169,13 +173,12 @@ BASE_EP_FILTER = BASE_EP_FILTER_PREFIX + '/endpoint_groups/{endpoint_group_id}'
BASE_ACCESS_TOKEN = (
'/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}')
-# TODO(stevemar): Use BASE_IDP_PROTOCOL when bug 1420125 is resolved.
-FEDERATED_AUTH_URL = ('/OS-FEDERATION/identity_providers/{identity_provider}'
- '/protocols/{protocol}/auth')
+FEDERATED_AUTH_URL = ('/OS-FEDERATION/identity_providers/{idp_id}'
+ '/protocols/{protocol_id}/auth')
FEDERATED_IDP_SPECIFIC_WEBSSO = ('/auth/OS-FEDERATION/identity_providers/'
'{idp_id}/protocols/{protocol_id}/websso')
-V3_JSON_HOME_RESOURCES_INHERIT_DISABLED = {
+V3_JSON_HOME_RESOURCES = {
json_home.build_v3_resource_relation('auth_tokens'): {
'href': '/auth/tokens'},
json_home.build_v3_resource_relation('auth_catalog'): {
@@ -231,8 +234,8 @@ V3_JSON_HOME_RESOURCES_INHERIT_DISABLED = {
_build_ec2tokens_relation(resource_name='user_credential'): {
'href-template': '/users/{user_id}/credentials/OS-EC2/{credential_id}',
'href-vars': {
- 'credential_id': json_home.build_v3_extension_parameter_relation(
- 'OS-EC2', '1.0', 'credential_id'),
+ 'credential_id':
+ json_home.build_v3_parameter_relation('credential_id'),
'user_id': json_home.Parameters.USER_ID, }},
_build_ec2tokens_relation(resource_name='user_credentials'): {
'href-template': '/users/{user_id}/credentials/OS-EC2',
@@ -324,6 +327,22 @@ V3_JSON_HOME_RESOURCES_INHERIT_DISABLED = {
'href-template': '/roles/{role_id}',
'href-vars': {
'role_id': json_home.Parameters.ROLE_ID, }},
+ json_home.build_v3_resource_relation('implied_roles'): {
+ 'href-template': '/roles/{prior_role_id}/implies',
+ 'href-vars': {
+ 'prior_role_id': json_home.Parameters.ROLE_ID},
+ 'hints': {'status': 'experimental'}},
+ json_home.build_v3_resource_relation('implied_role'): {
+ 'href-template':
+ '/roles/{prior_role_id}/implies/{implied_role_id}',
+ 'href-vars': {
+ 'prior_role_id': json_home.Parameters.ROLE_ID,
+ 'implied_role_id': json_home.Parameters.ROLE_ID,
+ },
+ 'hints': {'status': 'experimental'}},
+ json_home.build_v3_resource_relation('role_inferences'): {
+ 'href': '/role_inferences',
+ 'hints': {'status': 'experimental'}},
json_home.build_v3_resource_relation('role_assignments'): {
'href': '/role_assignments'},
json_home.build_v3_resource_relation('roles'): {'href': '/roles'},
@@ -394,12 +413,11 @@ V3_JSON_HOME_RESOURCES_INHERIT_DISABLED = {
'href-template': BASE_IDP_PROTOCOL,
'href-vars': {
'idp_id': IDP_ID_PARAMETER_RELATION}},
- # TODO(stevemar): Update href-vars when bug 1420125 is resolved.
_build_federation_rel(resource_name='identity_provider_protocol_auth'): {
'href-template': FEDERATED_AUTH_URL,
'href-vars': {
- 'identity_provider': IDP_ID_PARAMETER_RELATION,
- 'protocol': PROTOCOL_ID_PARAM_RELATION, }},
+ 'idp_id': IDP_ID_PARAMETER_RELATION,
+ 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }},
_build_oauth1_rel(resource_name='access_tokens'): {
'href': '/OS-OAUTH1/access_token'},
_build_oauth1_rel(resource_name='request_tokens'): {
@@ -509,6 +527,58 @@ V3_JSON_HOME_RESOURCES_INHERIT_DISABLED = {
'href-template': BASE_EP_FILTER + '/projects',
'href-vars': {'endpoint_group_id':
ENDPOINT_GROUP_ID_PARAMETER_RELATION, }},
+ _build_os_inherit_rel(
+ resource_name='domain_user_role_inherited_to_projects'):
+ {
+ 'href-template': '/OS-INHERIT/domains/{domain_id}/users/'
+ '{user_id}/roles/{role_id}/inherited_to_projects',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'role_id': json_home.Parameters.ROLE_ID,
+ 'user_id': json_home.Parameters.USER_ID, }},
+ _build_os_inherit_rel(
+ resource_name='domain_group_role_inherited_to_projects'):
+ {
+ 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/'
+ '{group_id}/roles/{role_id}/inherited_to_projects',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'group_id': json_home.Parameters.GROUP_ID,
+ 'role_id': json_home.Parameters.ROLE_ID, }},
+ _build_os_inherit_rel(
+ resource_name='domain_user_roles_inherited_to_projects'):
+ {
+ 'href-template': '/OS-INHERIT/domains/{domain_id}/users/'
+ '{user_id}/roles/inherited_to_projects',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'user_id': json_home.Parameters.USER_ID, }},
+ _build_os_inherit_rel(
+ resource_name='domain_group_roles_inherited_to_projects'):
+ {
+ 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/'
+ '{group_id}/roles/inherited_to_projects',
+ 'href-vars': {
+ 'domain_id': json_home.Parameters.DOMAIN_ID,
+ 'group_id': json_home.Parameters.GROUP_ID, }},
+ _build_os_inherit_rel(
+ resource_name='project_user_role_inherited_to_projects'):
+ {
+ 'href-template': '/OS-INHERIT/projects/{project_id}/users/'
+ '{user_id}/roles/{role_id}/inherited_to_projects',
+ 'href-vars': {
+ 'project_id': json_home.Parameters.PROJECT_ID,
+ 'role_id': json_home.Parameters.ROLE_ID,
+ 'user_id': json_home.Parameters.USER_ID, }},
+ _build_os_inherit_rel(
+ resource_name='project_group_role_inherited_to_projects'):
+ {
+ 'href-template': '/OS-INHERIT/projects/{project_id}/groups/'
+ '{group_id}/roles/{role_id}/inherited_to_projects',
+ 'href-vars': {
+ 'project_id': json_home.Parameters.PROJECT_ID,
+ 'group_id': json_home.Parameters.GROUP_ID,
+ 'role_id': json_home.Parameters.ROLE_ID, }},
json_home.build_v3_resource_relation('domain_config'): {
'href-template':
'/domains/{domain_id}/config',
@@ -530,99 +600,23 @@ V3_JSON_HOME_RESOURCES_INHERIT_DISABLED = {
'group': json_home.build_v3_parameter_relation('config_group'),
'option': json_home.build_v3_parameter_relation('config_option')},
'hints': {'status': 'experimental'}},
+ json_home.build_v3_resource_relation('domain_config_default'): {
+ 'href': '/domains/config/default',
+ 'hints': {'status': 'experimental'}},
+ json_home.build_v3_resource_relation('domain_config_default_group'): {
+ 'href-template': '/domains/config/{group}/default',
+ 'href-vars': {
+ 'group': json_home.build_v3_parameter_relation('config_group')},
+ 'hints': {'status': 'experimental'}},
+ json_home.build_v3_resource_relation('domain_config_default_option'): {
+ 'href-template': '/domains/config/{group}/{option}/default',
+ 'href-vars': {
+ 'group': json_home.build_v3_parameter_relation('config_group'),
+ 'option': json_home.build_v3_parameter_relation('config_option')},
+ 'hints': {'status': 'experimental'}},
}
-# with os-inherit enabled, there's some more resources.
-
-build_os_inherit_relation = functools.partial(
- json_home.build_v3_extension_resource_relation,
- extension_name='OS-INHERIT', extension_version='1.0')
-
-V3_JSON_HOME_RESOURCES_INHERIT_ENABLED = dict(
- V3_JSON_HOME_RESOURCES_INHERIT_DISABLED)
-V3_JSON_HOME_RESOURCES_INHERIT_ENABLED.update(
- (
- (
- build_os_inherit_relation(
- resource_name='domain_user_role_inherited_to_projects'),
- {
- 'href-template': '/OS-INHERIT/domains/{domain_id}/users/'
- '{user_id}/roles/{role_id}/inherited_to_projects',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'role_id': json_home.Parameters.ROLE_ID,
- 'user_id': json_home.Parameters.USER_ID,
- },
- }
- ),
- (
- build_os_inherit_relation(
- resource_name='domain_group_role_inherited_to_projects'),
- {
- 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/'
- '{group_id}/roles/{role_id}/inherited_to_projects',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'group_id': json_home.Parameters.GROUP_ID,
- 'role_id': json_home.Parameters.ROLE_ID,
- },
- }
- ),
- (
- build_os_inherit_relation(
- resource_name='domain_user_roles_inherited_to_projects'),
- {
- 'href-template': '/OS-INHERIT/domains/{domain_id}/users/'
- '{user_id}/roles/inherited_to_projects',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'user_id': json_home.Parameters.USER_ID,
- },
- }
- ),
- (
- build_os_inherit_relation(
- resource_name='domain_group_roles_inherited_to_projects'),
- {
- 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/'
- '{group_id}/roles/inherited_to_projects',
- 'href-vars': {
- 'domain_id': json_home.Parameters.DOMAIN_ID,
- 'group_id': json_home.Parameters.GROUP_ID,
- },
- }
- ),
- (
- build_os_inherit_relation(
- resource_name='project_user_role_inherited_to_projects'),
- {
- 'href-template': '/OS-INHERIT/projects/{project_id}/users/'
- '{user_id}/roles/{role_id}/inherited_to_projects',
- 'href-vars': {
- 'project_id': json_home.Parameters.PROJECT_ID,
- 'role_id': json_home.Parameters.ROLE_ID,
- 'user_id': json_home.Parameters.USER_ID,
- },
- }
- ),
- (
- build_os_inherit_relation(
- resource_name='project_group_role_inherited_to_projects'),
- {
- 'href-template': '/OS-INHERIT/projects/{project_id}/groups/'
- '{group_id}/roles/{role_id}/inherited_to_projects',
- 'href-vars': {
- 'project_id': json_home.Parameters.PROJECT_ID,
- 'group_id': json_home.Parameters.GROUP_ID,
- 'role_id': json_home.Parameters.ROLE_ID,
- },
- }
- ),
- )
-)
-
-
class TestClient(object):
def __init__(self, app=None, token=None):
self.app = app
@@ -751,7 +745,7 @@ class VersionTestCase(unit.TestCase):
def test_public_version_v2(self):
client = TestClient(self.public_app)
resp = client.get('/v2.0/')
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
data = jsonutils.loads(resp.body)
expected = v2_VERSION_RESPONSE
self._paste_in_port(expected['version'],
@@ -762,7 +756,7 @@ class VersionTestCase(unit.TestCase):
def test_admin_version_v2(self):
client = TestClient(self.admin_app)
resp = client.get('/v2.0/')
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
data = jsonutils.loads(resp.body)
expected = v2_VERSION_RESPONSE
self._paste_in_port(expected['version'],
@@ -775,7 +769,7 @@ class VersionTestCase(unit.TestCase):
for app in (self.public_app, self.admin_app):
client = TestClient(app)
resp = client.get('/v2.0/')
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
data = jsonutils.loads(resp.body)
expected = v2_VERSION_RESPONSE
self._paste_in_port(expected['version'], 'http://localhost/v2.0/')
@@ -784,7 +778,7 @@ class VersionTestCase(unit.TestCase):
def test_public_version_v3(self):
client = TestClient(self.public_app)
resp = client.get('/v3/')
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
data = jsonutils.loads(resp.body)
expected = v3_VERSION_RESPONSE
self._paste_in_port(expected['version'],
@@ -796,7 +790,7 @@ class VersionTestCase(unit.TestCase):
def test_admin_version_v3(self):
client = TestClient(self.admin_app)
resp = client.get('/v3/')
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
data = jsonutils.loads(resp.body)
expected = v3_VERSION_RESPONSE
self._paste_in_port(expected['version'],
@@ -809,7 +803,7 @@ class VersionTestCase(unit.TestCase):
for app in (self.public_app, self.admin_app):
client = TestClient(app)
resp = client.get('/v3/')
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
data = jsonutils.loads(resp.body)
expected = v3_VERSION_RESPONSE
self._paste_in_port(expected['version'], 'http://localhost/v3/')
@@ -824,7 +818,7 @@ class VersionTestCase(unit.TestCase):
# request to /v3 should pass
resp = client.get('/v3/')
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
data = jsonutils.loads(resp.body)
expected = v3_VERSION_RESPONSE
self._paste_in_port(expected['version'],
@@ -857,7 +851,7 @@ class VersionTestCase(unit.TestCase):
# request to /v2.0 should pass
resp = client.get('/v2.0/')
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
data = jsonutils.loads(resp.body)
expected = v2_VERSION_RESPONSE
self._paste_in_port(expected['version'],
@@ -897,7 +891,7 @@ class VersionTestCase(unit.TestCase):
# then the server responds with a JSON Home document.
exp_json_home_data = {
- 'resources': V3_JSON_HOME_RESOURCES_INHERIT_DISABLED}
+ 'resources': V3_JSON_HOME_RESOURCES}
self._test_json_home('/v3', exp_json_home_data)
@@ -906,7 +900,7 @@ class VersionTestCase(unit.TestCase):
# then the server responds with a JSON Home document.
exp_json_home_data = copy.deepcopy({
- 'resources': V3_JSON_HOME_RESOURCES_INHERIT_DISABLED})
+ 'resources': V3_JSON_HOME_RESOURCES})
json_home.translate_urls(exp_json_home_data, '/v3')
self._test_json_home('/', exp_json_home_data)
@@ -1022,45 +1016,6 @@ class VersionSingleAppTestCase(unit.TestCase):
self._test_version('admin')
-class VersionInheritEnabledTestCase(unit.TestCase):
- def setUp(self):
- super(VersionInheritEnabledTestCase, self).setUp()
- self.load_backends()
- self.public_app = self.loadapp('keystone', 'main')
- self.admin_app = self.loadapp('keystone', 'admin')
-
- self.config_fixture.config(
- public_endpoint='http://localhost:%(public_port)d',
- admin_endpoint='http://localhost:%(admin_port)d')
-
- def config_overrides(self):
- super(VersionInheritEnabledTestCase, self).config_overrides()
- admin_port = random.randint(10000, 30000)
- public_port = random.randint(40000, 60000)
- self.config_fixture.config(group='eventlet_server',
- public_port=public_port,
- admin_port=admin_port)
-
- self.config_fixture.config(group='os_inherit', enabled=True)
-
- def test_json_home_v3(self):
- # If the request is /v3 and the Accept header is application/json-home
- # then the server responds with a JSON Home document.
-
- client = TestClient(self.public_app)
- resp = client.get('/v3/', headers={'Accept': 'application/json-home'})
-
- self.assertThat(resp.status, tt_matchers.Equals('200 OK'))
- self.assertThat(resp.headers['Content-Type'],
- tt_matchers.Equals('application/json-home'))
-
- exp_json_home_data = {
- 'resources': V3_JSON_HOME_RESOURCES_INHERIT_ENABLED}
-
- self.assertThat(jsonutils.loads(resp.body),
- tt_matchers.Equals(exp_json_home_data))
-
-
class VersionBehindSslTestCase(unit.TestCase):
def setUp(self):
super(VersionBehindSslTestCase, self).setUp()
diff --git a/keystone-moon/keystone/tests/unit/test_wsgi.py b/keystone-moon/keystone/tests/unit/test_wsgi.py
index ed4c67d6..564d7406 100644
--- a/keystone-moon/keystone/tests/unit/test_wsgi.py
+++ b/keystone-moon/keystone/tests/unit/test_wsgi.py
@@ -85,7 +85,7 @@ class ApplicationTest(BaseWSGITest):
def test_response_content_type(self):
req = self._make_request()
resp = req.get_response(self.app)
- self.assertEqual(resp.content_type, 'application/json')
+ self.assertEqual('application/json', resp.content_type)
def test_query_string_available(self):
class FakeApp(wsgi.Application):
@@ -93,7 +93,7 @@ class ApplicationTest(BaseWSGITest):
return context['query_string']
req = self._make_request(url='/?1=2')
resp = req.get_response(FakeApp())
- self.assertEqual(jsonutils.loads(resp.body), {'1': '2'})
+ self.assertEqual({'1': '2'}, jsonutils.loads(resp.body))
def test_headers_available(self):
class FakeApp(wsgi.Application):
@@ -112,15 +112,16 @@ class ApplicationTest(BaseWSGITest):
resp = wsgi.render_response(body=data)
self.assertEqual('200 OK', resp.status)
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
self.assertEqual(body, resp.body)
self.assertEqual('X-Auth-Token', resp.headers.get('Vary'))
self.assertEqual(str(len(body)), resp.headers.get('Content-Length'))
def test_render_response_custom_status(self):
- resp = wsgi.render_response(status=(501, 'Not Implemented'))
+ resp = wsgi.render_response(
+ status=(http_client.NOT_IMPLEMENTED, 'Not Implemented'))
self.assertEqual('501 Not Implemented', resp.status)
- self.assertEqual(501, resp.status_int)
+ self.assertEqual(http_client.NOT_IMPLEMENTED, resp.status_int)
def test_successful_require_attribute(self):
app = FakeAttributeCheckerApp()
@@ -169,19 +170,31 @@ class ApplicationTest(BaseWSGITest):
self.assertEqual('Some-Value', resp.headers.get('Custom-Header'))
self.assertEqual('X-Auth-Token', resp.headers.get('Vary'))
+ def test_render_response_non_str_headers_converted(self):
+ resp = wsgi.render_response(
+ headers=[('Byte-Header', 'Byte-Value'),
+ (u'Unicode-Header', u'Unicode-Value')])
+ # assert that all headers are identified.
+ self.assertThat(resp.headers, matchers.HasLength(4))
+ self.assertEqual('Unicode-Value', resp.headers.get('Unicode-Header'))
+ # assert that unicode value is converted, the expected type is str
+ # on both python2 and python3.
+ self.assertEqual(str,
+ type(resp.headers.get('Unicode-Header')))
+
def test_render_response_no_body(self):
resp = wsgi.render_response()
self.assertEqual('204 No Content', resp.status)
- self.assertEqual(204, resp.status_int)
+ self.assertEqual(http_client.NO_CONTENT, resp.status_int)
self.assertEqual(b'', resp.body)
self.assertEqual('0', resp.headers.get('Content-Length'))
self.assertIsNone(resp.headers.get('Content-Type'))
def test_render_response_head_with_body(self):
resp = wsgi.render_response({'id': uuid.uuid4().hex}, method='HEAD')
- self.assertEqual(200, resp.status_int)
+ self.assertEqual(http_client.OK, resp.status_int)
self.assertEqual(b'', resp.body)
- self.assertNotEqual(resp.headers.get('Content-Length'), '0')
+ self.assertNotEqual('0', resp.headers.get('Content-Length'))
self.assertEqual('application/json', resp.headers.get('Content-Type'))
def test_application_local_config(self):
@@ -200,7 +213,9 @@ class ApplicationTest(BaseWSGITest):
def test_render_exception_host(self):
e = exception.Unauthorized(message=u'\u7f51\u7edc')
- context = {'host_url': 'http://%s:5000' % uuid.uuid4().hex}
+ req = self._make_request(url='/')
+ context = {'host_url': 'http://%s:5000' % uuid.uuid4().hex,
+ 'environment': req.environ}
resp = wsgi.render_exception(e, context=context)
self.assertEqual(http_client.UNAUTHORIZED, resp.status_int)
@@ -225,6 +240,77 @@ class ApplicationTest(BaseWSGITest):
self.assertEqual({'name': u'nonexit\xe8nt'},
jsonutils.loads(resp.body))
+ def test_base_url(self):
+ class FakeApp(wsgi.Application):
+ def index(self, context):
+ return self.base_url(context, 'public')
+ req = self._make_request(url='/')
+ # NOTE(gyee): according to wsgiref, if HTTP_HOST is present in the
+ # request environment, it will be used to construct the base url.
+ # SERVER_NAME and SERVER_PORT will be ignored. These are standard
+ # WSGI environment variables populated by the webserver.
+ req.environ.update({
+ 'SCRIPT_NAME': '/identity',
+ 'SERVER_NAME': '1.2.3.4',
+ 'wsgi.url_scheme': 'http',
+ 'SERVER_PORT': '80',
+ 'HTTP_HOST': '1.2.3.4',
+ })
+ resp = req.get_response(FakeApp())
+ self.assertEqual(b"http://1.2.3.4/identity", resp.body)
+
+ # if HTTP_HOST is absent, SERVER_NAME and SERVER_PORT will be used
+ req = self._make_request(url='/')
+ del req.environ['HTTP_HOST']
+ req.environ.update({
+ 'SCRIPT_NAME': '/identity',
+ 'SERVER_NAME': '1.1.1.1',
+ 'wsgi.url_scheme': 'http',
+ 'SERVER_PORT': '1234',
+ })
+ resp = req.get_response(FakeApp())
+ self.assertEqual(b"http://1.1.1.1:1234/identity", resp.body)
+
+ # make sure keystone normalizes the standard HTTP port 80 by stripping
+ # it
+ req = self._make_request(url='/')
+ req.environ.update({'HTTP_HOST': 'foo:80',
+ 'SCRIPT_NAME': '/identity'})
+ resp = req.get_response(FakeApp())
+ self.assertEqual(b"http://foo/identity", resp.body)
+
+ # make sure keystone normalizes the standard HTTPS port 443 by stripping
+ # it
+ req = self._make_request(url='/')
+ req.environ.update({'HTTP_HOST': 'foo:443',
+ 'SCRIPT_NAME': '/identity',
+ 'wsgi.url_scheme': 'https'})
+ resp = req.get_response(FakeApp())
+ self.assertEqual(b"https://foo/identity", resp.body)
+
+ # make sure non-standard port is preserved
+ req = self._make_request(url='/')
+ req.environ.update({'HTTP_HOST': 'foo:1234',
+ 'SCRIPT_NAME': '/identity'})
+ resp = req.get_response(FakeApp())
+ self.assertEqual(b"http://foo:1234/identity", resp.body)
+
+ # make sure version portion of the SCRIPT_NAME, '/v2.0', is stripped
+ # from base url
+ req = self._make_request(url='/')
+ req.environ.update({'HTTP_HOST': 'foo:80',
+ 'SCRIPT_NAME': '/bar/identity/v2.0'})
+ resp = req.get_response(FakeApp())
+ self.assertEqual(b"http://foo/bar/identity", resp.body)
+
+ # make sure version portion of the SCRIPT_NAME, '/v3' is stripped from
+ # base url
+ req = self._make_request(url='/')
+ req.environ.update({'HTTP_HOST': 'foo:80',
+ 'SCRIPT_NAME': '/identity/v3'})
+ resp = req.get_response(FakeApp())
+ self.assertEqual(b"http://foo/identity", resp.body)
+
class ExtensionRouterTest(BaseWSGITest):
def test_extensionrouter_local_config(self):
@@ -293,24 +379,15 @@ class MiddlewareTest(BaseWSGITest):
self.assertEqual(exception.UnexpectedError.code, resp.status_int)
return resp
- # Exception data should not be in the message when debug is False
- self.config_fixture.config(debug=False)
+ # Exception data should not be in the message when insecure_debug is
+ # False
+ self.config_fixture.config(debug=False, insecure_debug=False)
self.assertNotIn(exception_str, do_request().body)
- # Exception data should be in the message when debug is True
- self.config_fixture.config(debug=True)
+ # Exception data should be in the message when insecure_debug is True
+ self.config_fixture.config(debug=True, insecure_debug=True)
self.assertIn(exception_str, do_request().body)
- def test_middleware_local_config(self):
- class FakeMiddleware(wsgi.Middleware):
- def __init__(self, *args, **kwargs):
- self.kwargs = kwargs
-
- factory = FakeMiddleware.factory({}, testkey="test")
- app = factory(self.app)
- self.assertIn("testkey", app.kwargs)
- self.assertEqual("test", app.kwargs["testkey"])
-
class LocalizedResponseTest(unit.TestCase):
def test_request_match_default(self):
@@ -345,8 +422,8 @@ class LocalizedResponseTest(unit.TestCase):
def test_static_translated_string_is_lazy_translatable(self):
# Statically created message strings are an object that can get
# lazy-translated rather than a regular string.
- self.assertNotEqual(type(exception.Unauthorized.message_format),
- six.text_type)
+ self.assertNotEqual(six.text_type,
+ type(exception.Unauthorized.message_format))
@mock.patch.object(oslo_i18n, 'get_available_languages')
def test_get_localized_response(self, mock_gal):
@@ -457,12 +534,14 @@ class ServerTest(unit.TestCase):
server.start()
self.addCleanup(server.stop)
- self.assertEqual(2, mock_sock_dup.setsockopt.call_count)
-
- # Test the last set of call args i.e. for the keepidle
- mock_sock_dup.setsockopt.assert_called_with(socket.IPPROTO_TCP,
- socket.TCP_KEEPIDLE,
- 1)
+ if hasattr(socket, 'TCP_KEEPIDLE'):
+ self.assertEqual(2, mock_sock_dup.setsockopt.call_count)
+ # Test the last set of call args i.e. for the keepidle
+ mock_sock_dup.setsockopt.assert_called_with(socket.IPPROTO_TCP,
+ socket.TCP_KEEPIDLE,
+ 1)
+ else:
+ self.assertEqual(1, mock_sock_dup.setsockopt.call_count)
self.assertTrue(mock_listen.called)
diff --git a/keystone-moon/keystone/tests/unit/tests/test_core.py b/keystone-moon/keystone/tests/unit/tests/test_core.py
index 50f1309e..56e42bcc 100644
--- a/keystone-moon/keystone/tests/unit/tests/test_core.py
+++ b/keystone-moon/keystone/tests/unit/tests/test_core.py
@@ -39,7 +39,7 @@ class TestTestCase(unit.TestCase):
# If the arguments are invalid for the string in a log it raises an
# exception during testing.
self.assertThat(
- lambda: LOG.warn('String %(p1)s %(p2)s', {'p1': 'something'}),
+ lambda: LOG.warning('String %(p1)s %(p2)s', {'p1': 'something'}),
matchers.raises(KeyError))
def test_sa_warning(self):
diff --git a/keystone-moon/keystone/tests/unit/token/test_backends.py b/keystone-moon/keystone/tests/unit/token/test_backends.py
new file mode 100644
index 00000000..feb7e017
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/token/test_backends.py
@@ -0,0 +1,551 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import datetime
+import hashlib
+import uuid
+
+from keystoneclient.common import cms
+from oslo_config import cfg
+from oslo_utils import timeutils
+import six
+from six.moves import range
+
+from keystone import exception
+from keystone.tests import unit
+from keystone.tests.unit import utils as test_utils
+from keystone.token import provider
+
+
+CONF = cfg.CONF
+NULL_OBJECT = object()
+
+
+class TokenTests(object):
+ def _create_token_id(self):
+ # Use a token signed by the cms module
+ token_id = ""
+ for i in range(1, 20):
+ token_id += uuid.uuid4().hex
+ return cms.cms_sign_token(token_id,
+ CONF.signing.certfile,
+ CONF.signing.keyfile)
+
+ def _assert_revoked_token_list_matches_token_persistence(
+ self, revoked_token_id_list):
+ # Assert that the list passed in matches the list returned by the
+ # token persistence service
+ persistence_list = [
+ x['id']
+ for x in self.token_provider_api.list_revoked_tokens()
+ ]
+ self.assertEqual(persistence_list, revoked_token_id_list)
+
+ def test_token_crud(self):
+ token_id = self._create_token_id()
+ data = {'id': token_id, 'a': 'b',
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'},
+ 'token_data': {'access': {'token': {
+ 'audit_ids': [uuid.uuid4().hex]}}}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ expires = data_ref.pop('expires')
+ data_ref.pop('user_id')
+ self.assertIsInstance(expires, datetime.datetime)
+ data_ref.pop('id')
+ data.pop('id')
+ self.assertDictEqual(data, data_ref)
+
+ new_data_ref = self.token_provider_api._persistence.get_token(token_id)
+ expires = new_data_ref.pop('expires')
+ self.assertIsInstance(expires, datetime.datetime)
+ new_data_ref.pop('user_id')
+ new_data_ref.pop('id')
+
+ self.assertEqual(data, new_data_ref)
+
+ self.token_provider_api._persistence.delete_token(token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token, token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api._persistence.delete_token, token_id)
+
+ def create_token_sample_data(self, token_id=None, tenant_id=None,
+ trust_id=None, user_id=None, expires=None):
+ if token_id is None:
+ token_id = self._create_token_id()
+ if user_id is None:
+ user_id = 'testuserid'
+ # FIXME(morganfainberg): These tokens look nothing like "Real" tokens.
+ # This should be fixed when token issuance is cleaned up.
+ data = {'id': token_id, 'a': 'b',
+ 'user': {'id': user_id},
+ 'access': {'token': {'audit_ids': [uuid.uuid4().hex]}}}
+ if tenant_id is not None:
+ data['tenant'] = {'id': tenant_id, 'name': tenant_id}
+ if tenant_id is NULL_OBJECT:
+ data['tenant'] = None
+ if expires is not None:
+ data['expires'] = expires
+ if trust_id is not None:
+ data['trust_id'] = trust_id
+ data['access'].setdefault('trust', {})
+ # Testuserid2 is used here since a trustee will be different in
+ # the cases of impersonation and therefore should not match the
+ # token's user_id.
+ data['access']['trust']['trustee_user_id'] = 'testuserid2'
+ data['token_version'] = provider.V2
+ # Issue token stores a copy of all token data at token['token_data'].
+ # This emulates that assumption as part of the test.
+ data['token_data'] = copy.deepcopy(data)
+ new_token = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ return new_token['id'], data
+
+ def test_delete_tokens(self):
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(0, len(tokens))
+ token_id1, data = self.create_token_sample_data(
+ tenant_id='testtenantid')
+ token_id2, data = self.create_token_sample_data(
+ tenant_id='testtenantid')
+ token_id3, data = self.create_token_sample_data(
+ tenant_id='testtenantid',
+ user_id='testuserid1')
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(2, len(tokens))
+ self.assertIn(token_id2, tokens)
+ self.assertIn(token_id1, tokens)
+ self.token_provider_api._persistence.delete_tokens(
+ user_id='testuserid',
+ tenant_id='testtenantid')
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(0, len(tokens))
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ token_id1)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ token_id2)
+
+ self.token_provider_api._persistence.get_token(token_id3)
+
+ def test_delete_tokens_trust(self):
+ tokens = self.token_provider_api._persistence._list_tokens(
+ user_id='testuserid')
+ self.assertEqual(0, len(tokens))
+ token_id1, data = self.create_token_sample_data(
+ tenant_id='testtenantid',
+ trust_id='testtrustid')
+ token_id2, data = self.create_token_sample_data(
+ tenant_id='testtenantid',
+ user_id='testuserid1',
+ trust_id='testtrustid1')
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(1, len(tokens))
+ self.assertIn(token_id1, tokens)
+ self.token_provider_api._persistence.delete_tokens(
+ user_id='testuserid',
+ tenant_id='testtenantid',
+ trust_id='testtrustid')
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ token_id1)
+ self.token_provider_api._persistence.get_token(token_id2)
+
+ def _test_token_list(self, token_list_fn):
+ tokens = token_list_fn('testuserid')
+ self.assertEqual(0, len(tokens))
+ token_id1, data = self.create_token_sample_data()
+ tokens = token_list_fn('testuserid')
+ self.assertEqual(1, len(tokens))
+ self.assertIn(token_id1, tokens)
+ token_id2, data = self.create_token_sample_data()
+ tokens = token_list_fn('testuserid')
+ self.assertEqual(2, len(tokens))
+ self.assertIn(token_id2, tokens)
+ self.assertIn(token_id1, tokens)
+ self.token_provider_api._persistence.delete_token(token_id1)
+ tokens = token_list_fn('testuserid')
+ self.assertIn(token_id2, tokens)
+ self.assertNotIn(token_id1, tokens)
+ self.token_provider_api._persistence.delete_token(token_id2)
+ tokens = token_list_fn('testuserid')
+ self.assertNotIn(token_id2, tokens)
+ self.assertNotIn(token_id1, tokens)
+
+ # tenant-specific tokens
+ tenant1 = uuid.uuid4().hex
+ tenant2 = uuid.uuid4().hex
+ token_id3, data = self.create_token_sample_data(tenant_id=tenant1)
+ token_id4, data = self.create_token_sample_data(tenant_id=tenant2)
+ # test for existing but empty tenant (LP:1078497)
+ token_id5, data = self.create_token_sample_data(tenant_id=NULL_OBJECT)
+ tokens = token_list_fn('testuserid')
+ self.assertEqual(3, len(tokens))
+ self.assertNotIn(token_id1, tokens)
+ self.assertNotIn(token_id2, tokens)
+ self.assertIn(token_id3, tokens)
+ self.assertIn(token_id4, tokens)
+ self.assertIn(token_id5, tokens)
+ tokens = token_list_fn('testuserid', tenant2)
+ self.assertEqual(1, len(tokens))
+ self.assertNotIn(token_id1, tokens)
+ self.assertNotIn(token_id2, tokens)
+ self.assertNotIn(token_id3, tokens)
+ self.assertIn(token_id4, tokens)
+
+ def test_token_list(self):
+ self._test_token_list(
+ self.token_provider_api._persistence._list_tokens)
+
+ def test_token_list_trust(self):
+ trust_id = uuid.uuid4().hex
+ token_id5, data = self.create_token_sample_data(trust_id=trust_id)
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid', trust_id=trust_id)
+ self.assertEqual(1, len(tokens))
+ self.assertIn(token_id5, tokens)
+
+ def test_get_token_returns_not_found(self):
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ uuid.uuid4().hex)
+
+ def test_delete_token_returns_not_found(self):
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.delete_token,
+ uuid.uuid4().hex)
+
+ def test_expired_token(self):
+ token_id = uuid.uuid4().hex
+ expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ data_ref.pop('user_id')
+ self.assertDictEqual(data, data_ref)
+ self.assertRaises(exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ token_id)
+
+ def test_null_expires_token(self):
+ token_id = uuid.uuid4().hex
+ data = {'id': token_id, 'id_hash': token_id, 'a': 'b', 'expires': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ self.assertIsNotNone(data_ref['expires'])
+ new_data_ref = self.token_provider_api._persistence.get_token(token_id)
+
+ # MySQL doesn't store microseconds, so discard them before testing
+ data_ref['expires'] = data_ref['expires'].replace(microsecond=0)
+ new_data_ref['expires'] = new_data_ref['expires'].replace(
+ microsecond=0)
+
+ self.assertEqual(data_ref, new_data_ref)
+
+ def check_list_revoked_tokens(self, token_infos):
+ revocation_list = self.token_provider_api.list_revoked_tokens()
+ revoked_ids = [x['id'] for x in revocation_list]
+ revoked_audit_ids = [x['audit_id'] for x in revocation_list]
+ self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+ for token_id, audit_id in token_infos:
+ self.assertIn(token_id, revoked_ids)
+ self.assertIn(audit_id, revoked_audit_ids)
+
+ def delete_token(self):
+ token_id = uuid.uuid4().hex
+ audit_id = uuid.uuid4().hex
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'user': {'id': 'testuserid'},
+ 'token_data': {'token': {'audit_ids': [audit_id]}}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ self.token_provider_api._persistence.delete_token(token_id)
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api._persistence.get_token,
+ data_ref['id'])
+ self.assertRaises(
+ exception.TokenNotFound,
+ self.token_provider_api._persistence.delete_token,
+ data_ref['id'])
+ return (token_id, audit_id)
+
+ def test_list_revoked_tokens_returns_empty_list(self):
+ revoked_ids = [x['id']
+ for x in self.token_provider_api.list_revoked_tokens()]
+ self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+ self.assertEqual([], revoked_ids)
+
+ def test_list_revoked_tokens_for_single_token(self):
+ self.check_list_revoked_tokens([self.delete_token()])
+
+ def test_list_revoked_tokens_for_multiple_tokens(self):
+ self.check_list_revoked_tokens([self.delete_token()
+ for x in range(2)])
+
+ def test_flush_expired_token(self):
+ token_id = uuid.uuid4().hex
+ expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1)
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ data_ref.pop('user_id')
+ self.assertDictEqual(data, data_ref)
+
+ token_id = uuid.uuid4().hex
+ expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
+ data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+ 'expires': expire_time,
+ 'trust_id': None,
+ 'user': {'id': 'testuserid'}}
+ data_ref = self.token_provider_api._persistence.create_token(token_id,
+ data)
+ data_ref.pop('user_id')
+ self.assertDictEqual(data, data_ref)
+
+ self.token_provider_api._persistence.flush_expired_tokens()
+ tokens = self.token_provider_api._persistence._list_tokens(
+ 'testuserid')
+ self.assertEqual(1, len(tokens))
+ self.assertIn(token_id, tokens)
+
+    @unit.skip_if_cache_disabled('token')
+    def test_revocation_list_cache(self):
+        """Driver-level deletions bypass the cached revocation list.
+
+        Deleting a token directly through the persistence driver (rather
+        than the manager) must not appear in the cached revocation list
+        until invalidate_revocation_list() is called explicitly, whereas a
+        manager-level delete shows up immediately.
+        """
+        expire_time = timeutils.utcnow() + datetime.timedelta(minutes=10)
+        token_id = uuid.uuid4().hex
+        token_data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
+                      'expires': expire_time,
+                      'trust_id': None,
+                      'user': {'id': 'testuserid'},
+                      'token_data': {'token': {
+                          'audit_ids': [uuid.uuid4().hex]}}}
+        token2_id = uuid.uuid4().hex
+        token2_data = {'id_hash': token2_id, 'id': token2_id, 'a': 'b',
+                       'expires': expire_time,
+                       'trust_id': None,
+                       'user': {'id': 'testuserid'},
+                       'token_data': {'token': {
+                           'audit_ids': [uuid.uuid4().hex]}}}
+        # Create 2 Tokens.
+        self.token_provider_api._persistence.create_token(token_id,
+                                                          token_data)
+        self.token_provider_api._persistence.create_token(token2_id,
+                                                          token2_data)
+        # Verify the revocation list is empty.
+        self.assertEqual(
+            [], self.token_provider_api._persistence.list_revoked_tokens())
+        self.assertEqual([], self.token_provider_api.list_revoked_tokens())
+        # Delete a token directly, bypassing the manager.
+        self.token_provider_api._persistence.driver.delete_token(token_id)
+        # Verify the revocation list is still empty: the cached copy has not
+        # been invalidated by the driver-level delete.
+        self.assertEqual(
+            [], self.token_provider_api._persistence.list_revoked_tokens())
+        self.assertEqual([], self.token_provider_api.list_revoked_tokens())
+        # Invalidate the revocation list.
+        self.token_provider_api._persistence.invalidate_revocation_list()
+        # Verify the deleted token is in the revocation list.
+        revoked_ids = [x['id']
+                       for x in self.token_provider_api.list_revoked_tokens()]
+        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+        self.assertIn(token_id, revoked_ids)
+        # Delete the second token, through the manager; no explicit
+        # invalidation is needed for it to appear below.
+        self.token_provider_api._persistence.delete_token(token2_id)
+        revoked_ids = [x['id']
+                       for x in self.token_provider_api.list_revoked_tokens()]
+        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+        # Verify both tokens are in the revocation list.
+        self.assertIn(token_id, revoked_ids)
+        self.assertIn(token2_id, revoked_ids)
+
+    def _test_predictable_revoked_pki_token_id(self, hash_fn):
+        """Revoked PKI tokens are listed by the hash of the token id.
+
+        :param hash_fn: hashlib constructor (e.g. hashlib.md5 or
+            hashlib.sha256) matching the configured token hash_algorithm.
+        """
+        token_id = self._create_token_id()
+        token_id_hash = hash_fn(token_id.encode('utf-8')).hexdigest()
+        token = {'user': {'id': uuid.uuid4().hex},
+                 'token_data': {'token': {'audit_ids': [uuid.uuid4().hex]}}}
+
+        self.token_provider_api._persistence.create_token(token_id, token)
+        self.token_provider_api._persistence.delete_token(token_id)
+
+        revoked_ids = [x['id']
+                       for x in self.token_provider_api.list_revoked_tokens()]
+        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+        # The hash, not the raw (long) PKI token id, is what gets listed.
+        self.assertIn(token_id_hash, revoked_ids)
+        self.assertNotIn(token_id, revoked_ids)
+        # Every revocation entry carries an expiry.
+        for t in self.token_provider_api._persistence.list_revoked_tokens():
+            self.assertIn('expires', t)
+
+    def test_predictable_revoked_pki_token_id_default(self):
+        """Default hash algorithm (md5) is used for revoked PKI token ids."""
+        self._test_predictable_revoked_pki_token_id(hashlib.md5)
+
+    def test_predictable_revoked_pki_token_id_sha256(self):
+        """Configured sha256 hash algorithm is honored for revoked ids."""
+        self.config_fixture.config(group='token', hash_algorithm='sha256')
+        self._test_predictable_revoked_pki_token_id(hashlib.sha256)
+
+    def test_predictable_revoked_uuid_token_id(self):
+        """UUID tokens are revoked under their raw id (no hashing)."""
+        token_id = uuid.uuid4().hex
+        token = {'user': {'id': uuid.uuid4().hex},
+                 'token_data': {'token': {'audit_ids': [uuid.uuid4().hex]}}}
+
+        self.token_provider_api._persistence.create_token(token_id, token)
+        self.token_provider_api._persistence.delete_token(token_id)
+
+        revoked_tokens = self.token_provider_api.list_revoked_tokens()
+        revoked_ids = [x['id'] for x in revoked_tokens]
+        self._assert_revoked_token_list_matches_token_persistence(revoked_ids)
+        # Unlike PKI tokens, the raw id appears directly in the list.
+        self.assertIn(token_id, revoked_ids)
+        for t in revoked_tokens:
+            self.assertIn('expires', t)
+
+    def test_create_unicode_token_id(self):
+        """A token created with a text (unicode) id can be fetched back."""
+        token_id = six.text_type(self._create_token_id())
+        self.create_token_sample_data(token_id=token_id)
+        self.token_provider_api._persistence.get_token(token_id)
+
+    def test_create_unicode_user_id(self):
+        """A token created for a text (unicode) user id can be fetched."""
+        user_id = six.text_type(uuid.uuid4().hex)
+        token_id, data = self.create_token_sample_data(user_id=user_id)
+        self.token_provider_api._persistence.get_token(token_id)
+
+    def test_token_expire_timezone(self):
+        """Token expiry comparisons are correct regardless of local TZ.
+
+        For a range of UTC offsets, a token expiring one minute from now
+        must be retrievable and a token that expired one minute ago must
+        raise TokenNotFound.
+        """
+
+        @test_utils.timezone
+        def _create_token(expire_time):
+            # Runs with test_utils.TZ applied via the timezone decorator.
+            token_id = uuid.uuid4().hex
+            user_id = six.text_type(uuid.uuid4().hex)
+            return self.create_token_sample_data(token_id=token_id,
+                                                 user_id=user_id,
+                                                 expires=expire_time)
+
+        for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']:
+            test_utils.TZ = 'UTC' + d
+            expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1)
+            token_id, data_in = _create_token(expire_time)
+            data_get = self.token_provider_api._persistence.get_token(token_id)
+
+            self.assertEqual(data_in['id'], data_get['id'],
+                             'TZ=%s' % test_utils.TZ)
+
+            # An already-expired token must not be retrievable.
+            expire_time_expired = (
+                timeutils.utcnow() + datetime.timedelta(minutes=-1))
+            token_id, data_in = _create_token(expire_time_expired)
+            self.assertRaises(exception.TokenNotFound,
+                              self.token_provider_api._persistence.get_token,
+                              data_in['id'])
+
+
+class TokenCacheInvalidation(object):
+    """Mixin tests: token validation caches are invalidated on delete.
+
+    NOTE(review): assumes the concrete test case supplies
+    ``token_provider_api`` and arranges for ``_create_test_data()`` to
+    run before the tests -- confirm against the classes mixing this in.
+    """
+
+    def _create_test_data(self):
+        """Issue one scoped and one unscoped v2 token and warm the cache."""
+        self.user = unit.new_user_ref(
+            domain_id=CONF.identity.default_domain_id)
+        self.tenant = unit.new_project_ref(
+            domain_id=CONF.identity.default_domain_id)
+
+        # Create an equivalent of a scoped token
+        token_dict = {'user': self.user, 'tenant': self.tenant,
+                      'metadata': {}, 'id': 'placeholder'}
+        token_id, data = self.token_provider_api.issue_v2_token(token_dict)
+        self.scoped_token_id = token_id
+
+        # ..and an un-scoped one
+        token_dict = {'user': self.user, 'tenant': None,
+                      'metadata': {}, 'id': 'placeholder'}
+        token_id, data = self.token_provider_api.issue_v2_token(token_dict)
+        self.unscoped_token_id = token_id
+
+        # Validate them, in the various ways possible - this will load the
+        # responses into the token cache.
+        self._check_scoped_tokens_are_valid()
+        self._check_unscoped_tokens_are_valid()
+
+    def _check_unscoped_tokens_are_invalid(self):
+        """Both generic and v2 validation must raise TokenNotFound."""
+        self.assertRaises(
+            exception.TokenNotFound,
+            self.token_provider_api.validate_token,
+            self.unscoped_token_id)
+        self.assertRaises(
+            exception.TokenNotFound,
+            self.token_provider_api.validate_v2_token,
+            self.unscoped_token_id)
+
+    def _check_scoped_tokens_are_invalid(self):
+        """Validation fails both with and without the tenant argument."""
+        self.assertRaises(
+            exception.TokenNotFound,
+            self.token_provider_api.validate_token,
+            self.scoped_token_id)
+        self.assertRaises(
+            exception.TokenNotFound,
+            self.token_provider_api.validate_token,
+            self.scoped_token_id,
+            self.tenant['id'])
+        self.assertRaises(
+            exception.TokenNotFound,
+            self.token_provider_api.validate_v2_token,
+            self.scoped_token_id)
+        self.assertRaises(
+            exception.TokenNotFound,
+            self.token_provider_api.validate_v2_token,
+            self.scoped_token_id,
+            self.tenant['id'])
+
+    def _check_scoped_tokens_are_valid(self):
+        """Validation succeeds both with and without belongs_to."""
+        self.token_provider_api.validate_token(self.scoped_token_id)
+        self.token_provider_api.validate_token(
+            self.scoped_token_id, belongs_to=self.tenant['id'])
+        self.token_provider_api.validate_v2_token(self.scoped_token_id)
+        self.token_provider_api.validate_v2_token(
+            self.scoped_token_id, belongs_to=self.tenant['id'])
+
+    def _check_unscoped_tokens_are_valid(self):
+        """Generic and v2 validation of the unscoped token succeed."""
+        self.token_provider_api.validate_token(self.unscoped_token_id)
+        self.token_provider_api.validate_v2_token(self.unscoped_token_id)
+
+    def test_delete_unscoped_token(self):
+        """Deleting the unscoped token must not evict the scoped one."""
+        self.token_provider_api._persistence.delete_token(
+            self.unscoped_token_id)
+        self._check_unscoped_tokens_are_invalid()
+        self._check_scoped_tokens_are_valid()
+
+    def test_delete_scoped_token_by_id(self):
+        """Deleting the scoped token must not evict the unscoped one."""
+        self.token_provider_api._persistence.delete_token(self.scoped_token_id)
+        self._check_scoped_tokens_are_invalid()
+        self._check_unscoped_tokens_are_valid()
+
+    def test_delete_scoped_token_by_user(self):
+        """Deleting all of a user's tokens invalidates both tokens."""
+        self.token_provider_api._persistence.delete_tokens(self.user['id'])
+        # Since we are deleting all tokens for this user, they should all
+        # now be invalid.
+        self._check_scoped_tokens_are_invalid()
+        self._check_unscoped_tokens_are_invalid()
+
+    def test_delete_scoped_token_by_user_and_tenant(self):
+        """Deleting by user and tenant only invalidates the scoped token."""
+        self.token_provider_api._persistence.delete_tokens(
+            self.user['id'],
+            tenant_id=self.tenant['id'])
+        self._check_scoped_tokens_are_invalid()
+        self._check_unscoped_tokens_are_valid()
diff --git a/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py b/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
index bfb590db..5f51d7b3 100644
--- a/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
+++ b/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py
@@ -22,8 +22,8 @@ from six.moves import urllib
from keystone.common import config
from keystone.common import utils
-from keystone.contrib.federation import constants as federation_constants
from keystone import exception
+from keystone.federation import constants as federation_constants
from keystone.tests import unit
from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import database
@@ -48,17 +48,25 @@ class TestFernetTokenProvider(unit.TestCase):
def test_needs_persistence_returns_false(self):
self.assertFalse(self.provider.needs_persistence())
- def test_invalid_v3_token_raises_404(self):
- self.assertRaises(
+ def test_invalid_v3_token_raises_token_not_found(self):
+ # NOTE(lbragstad): Here we use the validate_non_persistent_token()
+ # methods because the validate_v3_token() method is strictly for
+ # validating UUID formatted tokens. It is written to assume cached
+ # tokens from a backend, where validate_non_persistent_token() is not.
+ token_id = uuid.uuid4().hex
+ e = self.assertRaises(
exception.TokenNotFound,
- self.provider.validate_v3_token,
- uuid.uuid4().hex)
+ self.provider.validate_non_persistent_token,
+ token_id)
+ self.assertIn(token_id, u'%s' % e)
- def test_invalid_v2_token_raises_404(self):
- self.assertRaises(
+ def test_invalid_v2_token_raises_token_not_found(self):
+ token_id = uuid.uuid4().hex
+ e = self.assertRaises(
exception.TokenNotFound,
- self.provider.validate_v2_token,
- uuid.uuid4().hex)
+ self.provider.validate_non_persistent_token,
+ token_id)
+ self.assertIn(token_id, u'%s' % e)
class TestValidate(unit.TestCase):
@@ -91,7 +99,6 @@ class TestValidate(unit.TestCase):
token = token_data['token']
self.assertIsInstance(token['audit_ids'], list)
self.assertIsInstance(token['expires_at'], str)
- self.assertEqual({}, token['extras'])
self.assertIsInstance(token['issued_at'], str)
self.assertEqual(method_names, token['methods'])
exp_user_info = {
@@ -200,7 +207,7 @@ class TestValidate(unit.TestCase):
def test_validate_v3_token_validation_error_exc(self):
# When the token format isn't recognized, TokenNotFound is raised.
- # A uuid string isn't a valid fernet token.
+ # A uuid string isn't a valid Fernet token.
token_id = uuid.uuid4().hex
self.assertRaises(exception.TokenNotFound,
self.token_provider_api.validate_v3_token, token_id)
@@ -214,10 +221,14 @@ class TestTokenFormatter(unit.TestCase):
def test_restore_padding(self):
# 'a' will result in '==' padding, 'aa' will result in '=' padding, and
# 'aaa' will result in no padding.
- strings_to_test = ['a', 'aa', 'aaa']
-
- for string in strings_to_test:
- encoded_string = base64.urlsafe_b64encode(string)
+ binary_to_test = [b'a', b'aa', b'aaa']
+
+ for binary in binary_to_test:
+ # base64.urlsafe_b64encode takes six.binary_type and returns
+ # six.binary_type.
+ encoded_string = base64.urlsafe_b64encode(binary)
+ encoded_string = encoded_string.decode('utf-8')
+ # encoded_string is now six.text_type.
encoded_str_without_padding = encoded_string.rstrip('=')
self.assertFalse(encoded_str_without_padding.endswith('='))
encoded_str_with_padding_restored = (
@@ -231,36 +242,57 @@ class TestTokenFormatter(unit.TestCase):
second_value = uuid.uuid4().hex
payload = (first_value, second_value)
msgpack_payload = msgpack.packb(payload)
+ # msgpack_payload is six.binary_type.
+
+ tf = token_formatters.TokenFormatter()
- # NOTE(lbragstad): This method perserves the way that keystone used to
+ # NOTE(lbragstad): This method preserves the way that keystone used to
# percent encode the tokens, prior to bug #1491926.
def legacy_pack(payload):
- tf = token_formatters.TokenFormatter()
+ # payload is six.binary_type.
encrypted_payload = tf.crypto.encrypt(payload)
+ # encrypted_payload is six.binary_type.
# the encrypted_payload is returned with padding appended
- self.assertTrue(encrypted_payload.endswith('='))
+ self.assertTrue(encrypted_payload.endswith(b'='))
# using urllib.parse.quote will percent encode the padding, like
# keystone did in Kilo.
percent_encoded_payload = urllib.parse.quote(encrypted_payload)
+ # percent_encoded_payload is six.text_type.
- # ensure that the padding was actaully percent encoded
+ # ensure that the padding was actually percent encoded
self.assertTrue(percent_encoded_payload.endswith('%3D'))
return percent_encoded_payload
token_with_legacy_padding = legacy_pack(msgpack_payload)
- tf = token_formatters.TokenFormatter()
+ # token_with_legacy_padding is six.text_type.
# demonstrate the we can validate a payload that has been percent
# encoded with the Fernet logic that existed in Kilo
serialized_payload = tf.unpack(token_with_legacy_padding)
+ # serialized_payload is six.binary_type.
returned_payload = msgpack.unpackb(serialized_payload)
- self.assertEqual(first_value, returned_payload[0])
- self.assertEqual(second_value, returned_payload[1])
+ # returned_payload contains six.binary_type.
+ self.assertEqual(first_value, returned_payload[0].decode('utf-8'))
+ self.assertEqual(second_value, returned_payload[1].decode('utf-8'))
class TestPayloads(unit.TestCase):
+    def assertTimestampsEqual(self, expected, actual):
+        """Assert two ISO 8601 timestamp strings are (nearly) equal.
+
+        Parsed values may differ slightly because of the round trip through
+        a float in the token payload, so compare with a small delta rather
+        than exact equality.
+        """
+        # The timestamp that we get back when parsing the payload may not
+        # exactly match the timestamp that was put in the payload due to
+        # conversion to and from a float.
+
+        exp_time = timeutils.parse_isotime(expected)
+        actual_time = timeutils.parse_isotime(actual)
+
+        # the granularity of the timestamp string is microseconds and it's
+        # only the last digit in the representation that's different, so use
+        # a delta of 1e-05 seconds (10 microseconds), just above that
+        # precision.
+        return self.assertCloseEnoughForGovernmentWork(exp_time, actual_time,
+                                                       delta=1e-05)
+
def test_uuid_hex_to_byte_conversions(self):
payload_cls = token_formatters.BasePayload
@@ -274,249 +306,137 @@ class TestPayloads(unit.TestCase):
expected_uuid_in_bytes)
self.assertEqual(expected_hex_uuid, actual_hex_uuid)
- def test_time_string_to_int_conversions(self):
+ def test_time_string_to_float_conversions(self):
payload_cls = token_formatters.BasePayload
- expected_time_str = utils.isotime(subsecond=True)
- time_obj = timeutils.parse_isotime(expected_time_str)
- expected_time_int = (
+ original_time_str = utils.isotime(subsecond=True)
+ time_obj = timeutils.parse_isotime(original_time_str)
+ expected_time_float = (
(timeutils.normalize_time(time_obj) -
datetime.datetime.utcfromtimestamp(0)).total_seconds())
- actual_time_int = payload_cls._convert_time_string_to_int(
- expected_time_str)
- self.assertEqual(expected_time_int, actual_time_int)
-
- actual_time_str = payload_cls._convert_int_to_time_string(
- actual_time_int)
+ # NOTE(lbragstad): The token expiration time for Fernet tokens is
+ # passed in the payload of the token. This is different from the token
+ # creation time, which is handled by Fernet and doesn't support
+ # subsecond precision because it is a timestamp integer.
+ self.assertIsInstance(expected_time_float, float)
+
+ actual_time_float = payload_cls._convert_time_string_to_float(
+ original_time_str)
+ self.assertIsInstance(actual_time_float, float)
+ self.assertEqual(expected_time_float, actual_time_float)
+
+ # Generate expected_time_str using the same time float. Using
+ # original_time_str from utils.isotime will occasionally fail due to
+ # floating point rounding differences.
+ time_object = datetime.datetime.utcfromtimestamp(actual_time_float)
+ expected_time_str = utils.isotime(time_object, subsecond=True)
+
+ actual_time_str = payload_cls._convert_float_to_time_string(
+ actual_time_float)
self.assertEqual(expected_time_str, actual_time_str)
- def test_unscoped_payload(self):
- exp_user_id = uuid.uuid4().hex
- exp_methods = ['password']
+ def _test_payload(self, payload_class, exp_user_id=None, exp_methods=None,
+ exp_project_id=None, exp_domain_id=None,
+ exp_trust_id=None, exp_federated_info=None,
+ exp_access_token_id=None):
+ exp_user_id = exp_user_id or uuid.uuid4().hex
+ exp_methods = exp_methods or ['password']
exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
exp_audit_ids = [provider.random_urlsafe_str()]
- payload = token_formatters.UnscopedPayload.assemble(
- exp_user_id, exp_methods, exp_expires_at, exp_audit_ids)
+ payload = payload_class.assemble(
+ exp_user_id, exp_methods, exp_project_id, exp_domain_id,
+ exp_expires_at, exp_audit_ids, exp_trust_id, exp_federated_info,
+ exp_access_token_id)
- (user_id, methods, expires_at, audit_ids) = (
- token_formatters.UnscopedPayload.disassemble(payload))
+ (user_id, methods, project_id,
+ domain_id, expires_at, audit_ids,
+ trust_id, federated_info,
+ access_token_id) = payload_class.disassemble(payload)
self.assertEqual(exp_user_id, user_id)
self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_expires_at, expires_at)
+ self.assertTimestampsEqual(exp_expires_at, expires_at)
self.assertEqual(exp_audit_ids, audit_ids)
-
- def test_project_scoped_payload(self):
- exp_user_id = uuid.uuid4().hex
- exp_methods = ['password']
- exp_project_id = uuid.uuid4().hex
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
-
- payload = token_formatters.ProjectScopedPayload.assemble(
- exp_user_id, exp_methods, exp_project_id, exp_expires_at,
- exp_audit_ids)
-
- (user_id, methods, project_id, expires_at, audit_ids) = (
- token_formatters.ProjectScopedPayload.disassemble(payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
self.assertEqual(exp_project_id, project_id)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
+ self.assertEqual(exp_domain_id, domain_id)
+ self.assertEqual(exp_trust_id, trust_id)
+ self.assertEqual(exp_access_token_id, access_token_id)
- def test_domain_scoped_payload(self):
- exp_user_id = uuid.uuid4().hex
- exp_methods = ['password']
- exp_domain_id = uuid.uuid4().hex
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
+ if exp_federated_info:
+ self.assertDictEqual(exp_federated_info, federated_info)
+ else:
+ self.assertIsNone(federated_info)
- payload = token_formatters.DomainScopedPayload.assemble(
- exp_user_id, exp_methods, exp_domain_id, exp_expires_at,
- exp_audit_ids)
+ def test_unscoped_payload(self):
+ self._test_payload(token_formatters.UnscopedPayload)
- (user_id, methods, domain_id, expires_at, audit_ids) = (
- token_formatters.DomainScopedPayload.disassemble(payload))
+ def test_project_scoped_payload(self):
+ self._test_payload(token_formatters.ProjectScopedPayload,
+ exp_project_id=uuid.uuid4().hex)
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_domain_id, domain_id)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
+ def test_domain_scoped_payload(self):
+ self._test_payload(token_formatters.DomainScopedPayload,
+ exp_domain_id=uuid.uuid4().hex)
def test_domain_scoped_payload_with_default_domain(self):
- exp_user_id = uuid.uuid4().hex
- exp_methods = ['password']
- exp_domain_id = CONF.identity.default_domain_id
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
-
- payload = token_formatters.DomainScopedPayload.assemble(
- exp_user_id, exp_methods, exp_domain_id, exp_expires_at,
- exp_audit_ids)
-
- (user_id, methods, domain_id, expires_at, audit_ids) = (
- token_formatters.DomainScopedPayload.disassemble(payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_domain_id, domain_id)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
+ self._test_payload(token_formatters.DomainScopedPayload,
+ exp_domain_id=CONF.identity.default_domain_id)
def test_trust_scoped_payload(self):
- exp_user_id = uuid.uuid4().hex
- exp_methods = ['password']
- exp_project_id = uuid.uuid4().hex
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
- exp_trust_id = uuid.uuid4().hex
-
- payload = token_formatters.TrustScopedPayload.assemble(
- exp_user_id, exp_methods, exp_project_id, exp_expires_at,
- exp_audit_ids, exp_trust_id)
-
- (user_id, methods, project_id, expires_at, audit_ids, trust_id) = (
- token_formatters.TrustScopedPayload.disassemble(payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_project_id, project_id)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
- self.assertEqual(exp_trust_id, trust_id)
-
- def _test_unscoped_payload_with_user_id(self, exp_user_id):
- exp_methods = ['password']
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
-
- payload = token_formatters.UnscopedPayload.assemble(
- exp_user_id, exp_methods, exp_expires_at, exp_audit_ids)
-
- (user_id, methods, expires_at, audit_ids) = (
- token_formatters.UnscopedPayload.disassemble(payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
+ self._test_payload(token_formatters.TrustScopedPayload,
+ exp_project_id=uuid.uuid4().hex,
+ exp_trust_id=uuid.uuid4().hex)
def test_unscoped_payload_with_non_uuid_user_id(self):
- self._test_unscoped_payload_with_user_id('someNonUuidUserId')
+ self._test_payload(token_formatters.UnscopedPayload,
+ exp_user_id='someNonUuidUserId')
def test_unscoped_payload_with_16_char_non_uuid_user_id(self):
- self._test_unscoped_payload_with_user_id('0123456789abcdef')
-
- def _test_project_scoped_payload_with_ids(self, exp_user_id,
- exp_project_id):
- exp_methods = ['password']
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
+ self._test_payload(token_formatters.UnscopedPayload,
+ exp_user_id='0123456789abcdef')
- payload = token_formatters.ProjectScopedPayload.assemble(
- exp_user_id, exp_methods, exp_project_id, exp_expires_at,
- exp_audit_ids)
+ def test_project_scoped_payload_with_non_uuid_ids(self):
+ self._test_payload(token_formatters.ProjectScopedPayload,
+ exp_user_id='someNonUuidUserId',
+ exp_project_id='someNonUuidProjectId')
- (user_id, methods, project_id, expires_at, audit_ids) = (
- token_formatters.ProjectScopedPayload.disassemble(payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_project_id, project_id)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
-
- def test_project_scoped_payload_with_non_uuid_user_id(self):
- self._test_project_scoped_payload_with_ids('someNonUuidUserId',
- 'someNonUuidProjectId')
-
- def test_project_scoped_payload_with_16_char_non_uuid_user_id(self):
- self._test_project_scoped_payload_with_ids('0123456789abcdef',
- '0123456789abcdef')
-
- def _test_domain_scoped_payload_with_user_id(self, exp_user_id):
- exp_methods = ['password']
- exp_domain_id = uuid.uuid4().hex
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
-
- payload = token_formatters.DomainScopedPayload.assemble(
- exp_user_id, exp_methods, exp_domain_id, exp_expires_at,
- exp_audit_ids)
-
- (user_id, methods, domain_id, expires_at, audit_ids) = (
- token_formatters.DomainScopedPayload.disassemble(payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_domain_id, domain_id)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
+ def test_project_scoped_payload_with_16_char_non_uuid_ids(self):
+ self._test_payload(token_formatters.ProjectScopedPayload,
+ exp_user_id='0123456789abcdef',
+ exp_project_id='0123456789abcdef')
def test_domain_scoped_payload_with_non_uuid_user_id(self):
- self._test_domain_scoped_payload_with_user_id('nonUuidUserId')
+ self._test_payload(token_formatters.DomainScopedPayload,
+ exp_user_id='nonUuidUserId',
+ exp_domain_id=uuid.uuid4().hex)
def test_domain_scoped_payload_with_16_char_non_uuid_user_id(self):
- self._test_domain_scoped_payload_with_user_id('0123456789abcdef')
-
- def _test_trust_scoped_payload_with_ids(self, exp_user_id, exp_project_id):
- exp_methods = ['password']
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
- exp_trust_id = uuid.uuid4().hex
-
- payload = token_formatters.TrustScopedPayload.assemble(
- exp_user_id, exp_methods, exp_project_id, exp_expires_at,
- exp_audit_ids, exp_trust_id)
-
- (user_id, methods, project_id, expires_at, audit_ids, trust_id) = (
- token_formatters.TrustScopedPayload.disassemble(payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_project_id, project_id)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
- self.assertEqual(exp_trust_id, trust_id)
-
- def test_trust_scoped_payload_with_non_uuid_user_id(self):
- self._test_trust_scoped_payload_with_ids('someNonUuidUserId',
- 'someNonUuidProjectId')
-
- def test_trust_scoped_payload_with_16_char_non_uuid_user_id(self):
- self._test_trust_scoped_payload_with_ids('0123456789abcdef',
- '0123456789abcdef')
+ self._test_payload(token_formatters.DomainScopedPayload,
+ exp_user_id='0123456789abcdef',
+ exp_domain_id=uuid.uuid4().hex)
+
+ def test_trust_scoped_payload_with_non_uuid_ids(self):
+ self._test_payload(token_formatters.TrustScopedPayload,
+ exp_user_id='someNonUuidUserId',
+ exp_project_id='someNonUuidProjectId',
+ exp_trust_id=uuid.uuid4().hex)
+
+ def test_trust_scoped_payload_with_16_char_non_uuid_ids(self):
+ self._test_payload(token_formatters.TrustScopedPayload,
+ exp_user_id='0123456789abcdef',
+ exp_project_id='0123456789abcdef',
+ exp_trust_id=uuid.uuid4().hex)
def _test_federated_payload_with_ids(self, exp_user_id, exp_group_id):
- exp_methods = ['password']
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
exp_federated_info = {'group_ids': [{'id': exp_group_id}],
'idp_id': uuid.uuid4().hex,
'protocol_id': uuid.uuid4().hex}
- payload = token_formatters.FederatedUnscopedPayload.assemble(
- exp_user_id, exp_methods, exp_expires_at, exp_audit_ids,
- exp_federated_info)
-
- (user_id, methods, expires_at, audit_ids, federated_info) = (
- token_formatters.FederatedUnscopedPayload.disassemble(payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
- self.assertEqual(exp_federated_info['group_ids'][0]['id'],
- federated_info['group_ids'][0]['id'])
- self.assertEqual(exp_federated_info['idp_id'],
- federated_info['idp_id'])
- self.assertEqual(exp_federated_info['protocol_id'],
- federated_info['protocol_id'])
+ self._test_payload(token_formatters.FederatedUnscopedPayload,
+ exp_user_id=exp_user_id,
+ exp_federated_info=exp_federated_info)
def test_federated_payload_with_non_uuid_ids(self):
self._test_federated_payload_with_ids('someNonUuidUserId',
@@ -527,56 +447,31 @@ class TestPayloads(unit.TestCase):
'0123456789abcdef')
def test_federated_project_scoped_payload(self):
- exp_user_id = 'someNonUuidUserId'
- exp_methods = ['token']
- exp_project_id = uuid.uuid4().hex
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}],
'idp_id': uuid.uuid4().hex,
'protocol_id': uuid.uuid4().hex}
- payload = token_formatters.FederatedProjectScopedPayload.assemble(
- exp_user_id, exp_methods, exp_project_id, exp_expires_at,
- exp_audit_ids, exp_federated_info)
-
- (user_id, methods, project_id, expires_at, audit_ids,
- federated_info) = (
- token_formatters.FederatedProjectScopedPayload.disassemble(
- payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_project_id, project_id)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
- self.assertDictEqual(exp_federated_info, federated_info)
+ self._test_payload(token_formatters.FederatedProjectScopedPayload,
+ exp_user_id='someNonUuidUserId',
+ exp_methods=['token'],
+ exp_project_id=uuid.uuid4().hex,
+ exp_federated_info=exp_federated_info)
def test_federated_domain_scoped_payload(self):
- exp_user_id = 'someNonUuidUserId'
- exp_methods = ['token']
- exp_domain_id = uuid.uuid4().hex
- exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True)
- exp_audit_ids = [provider.random_urlsafe_str()]
exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}],
'idp_id': uuid.uuid4().hex,
'protocol_id': uuid.uuid4().hex}
- payload = token_formatters.FederatedDomainScopedPayload.assemble(
- exp_user_id, exp_methods, exp_domain_id, exp_expires_at,
- exp_audit_ids, exp_federated_info)
+ self._test_payload(token_formatters.FederatedDomainScopedPayload,
+ exp_user_id='someNonUuidUserId',
+ exp_methods=['token'],
+ exp_domain_id=uuid.uuid4().hex,
+ exp_federated_info=exp_federated_info)
- (user_id, methods, domain_id, expires_at, audit_ids,
- federated_info) = (
- token_formatters.FederatedDomainScopedPayload.disassemble(
- payload))
-
- self.assertEqual(exp_user_id, user_id)
- self.assertEqual(exp_methods, methods)
- self.assertEqual(exp_domain_id, domain_id)
- self.assertEqual(exp_expires_at, expires_at)
- self.assertEqual(exp_audit_ids, audit_ids)
- self.assertDictEqual(exp_federated_info, federated_info)
+ def test_oauth_scoped_payload(self):
+ self._test_payload(token_formatters.OauthScopedPayload,
+ exp_project_id=uuid.uuid4().hex,
+ exp_access_token_id=uuid.uuid4().hex)
class TestFernetKeyRotation(unit.TestCase):
@@ -610,7 +505,7 @@ class TestFernetKeyRotation(unit.TestCase):
static set of keys, and simply shuffling them, would fail such a test).
"""
- # Load the keys into a list.
+ # Load the keys into a list, keys is list of six.text_type.
keys = fernet_utils.load_keys()
# Sort the list of keys by the keys themselves (they were previously
@@ -620,7 +515,8 @@ class TestFernetKeyRotation(unit.TestCase):
# Create the thumbprint using all keys in the repository.
signature = hashlib.sha1()
for key in keys:
- signature.update(key)
+ # Need to convert key to six.binary_type for update.
+ signature.update(key.encode('utf-8'))
return signature.hexdigest()
def assertRepositoryState(self, expected_size):
diff --git a/keystone-moon/keystone/tests/unit/token/test_provider.py b/keystone-moon/keystone/tests/unit/token/test_provider.py
index be831484..7093f3ba 100644
--- a/keystone-moon/keystone/tests/unit/token/test_provider.py
+++ b/keystone-moon/keystone/tests/unit/token/test_provider.py
@@ -24,7 +24,7 @@ class TestRandomStrings(unit.BaseTestCase):
def test_strings_can_be_converted_to_bytes(self):
s = provider.random_urlsafe_str()
- self.assertTrue(isinstance(s, six.string_types))
+ self.assertIsInstance(s, six.text_type)
b = provider.random_urlsafe_str_to_bytes(s)
- self.assertTrue(isinstance(b, bytes))
+ self.assertIsInstance(b, six.binary_type)
diff --git a/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py b/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py
index 6114b723..9e8c3889 100644
--- a/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py
+++ b/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py
@@ -28,7 +28,8 @@ class TestTokenDataHelper(unit.TestCase):
def test_v3_token_data_helper_populate_audit_info_string(self):
token_data = {}
- audit_info = base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]
+ audit_info_bytes = base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]
+ audit_info = audit_info_bytes.decode('utf-8')
self.v3_data_helper._populate_audit_info(token_data, audit_info)
self.assertIn(audit_info, token_data['audit_ids'])
self.assertThat(token_data['audit_ids'], matchers.HasLength(2))
diff --git a/keystone-moon/keystone/tests/unit/token/test_token_model.py b/keystone-moon/keystone/tests/unit/token/test_token_model.py
index f1398491..1cb0ef55 100644
--- a/keystone-moon/keystone/tests/unit/token/test_token_model.py
+++ b/keystone-moon/keystone/tests/unit/token/test_token_model.py
@@ -17,8 +17,8 @@ from oslo_config import cfg
from oslo_utils import timeutils
from six.moves import range
-from keystone.contrib.federation import constants as federation_constants
from keystone import exception
+from keystone.federation import constants as federation_constants
from keystone.models import token_model
from keystone.tests.unit import core
from keystone.tests.unit import test_token_provider
diff --git a/keystone-moon/keystone/tests/unit/trust/__init__.py b/keystone-moon/keystone/tests/unit/trust/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/trust/__init__.py
diff --git a/keystone-moon/keystone/tests/unit/trust/test_backends.py b/keystone-moon/keystone/tests/unit/trust/test_backends.py
new file mode 100644
index 00000000..05df866f
--- /dev/null
+++ b/keystone-moon/keystone/tests/unit/trust/test_backends.py
@@ -0,0 +1,172 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import uuid
+
+from oslo_utils import timeutils
+from six.moves import range
+
+from keystone import exception
+
+
+class TrustTests(object):
+ def create_sample_trust(self, new_id, remaining_uses=None):
+ self.trustor = self.user_foo
+ self.trustee = self.user_two
+ expires_at = datetime.datetime.utcnow().replace(year=2032)
+ trust_data = (self.trust_api.create_trust
+ (new_id,
+ {'trustor_user_id': self.trustor['id'],
+ 'trustee_user_id': self.user_two['id'],
+ 'project_id': self.tenant_bar['id'],
+ 'expires_at': expires_at,
+ 'impersonation': True,
+ 'remaining_uses': remaining_uses},
+ roles=[{"id": "member"},
+ {"id": "other"},
+ {"id": "browser"}]))
+ return trust_data
+
+ def test_delete_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+ trust_id = trust_data['id']
+ self.assertIsNotNone(trust_data)
+ trust_data = self.trust_api.get_trust(trust_id)
+ self.assertEqual(new_id, trust_data['id'])
+ self.trust_api.delete_trust(trust_id)
+ self.assertRaises(exception.TrustNotFound,
+ self.trust_api.get_trust,
+ trust_id)
+
+ def test_delete_trust_not_found(self):
+ trust_id = uuid.uuid4().hex
+ self.assertRaises(exception.TrustNotFound,
+ self.trust_api.delete_trust,
+ trust_id)
+
+ def test_get_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+ trust_id = trust_data['id']
+ self.assertIsNotNone(trust_data)
+ trust_data = self.trust_api.get_trust(trust_id)
+ self.assertEqual(new_id, trust_data['id'])
+ self.trust_api.delete_trust(trust_data['id'])
+
+ def test_get_deleted_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+ self.assertIsNotNone(trust_data)
+ self.assertIsNone(trust_data['deleted_at'])
+ self.trust_api.delete_trust(new_id)
+ self.assertRaises(exception.TrustNotFound,
+ self.trust_api.get_trust,
+ new_id)
+ deleted_trust = self.trust_api.get_trust(trust_data['id'],
+ deleted=True)
+ self.assertEqual(trust_data['id'], deleted_trust['id'])
+ self.assertIsNotNone(deleted_trust.get('deleted_at'))
+
+ def test_create_trust(self):
+ new_id = uuid.uuid4().hex
+ trust_data = self.create_sample_trust(new_id)
+
+ self.assertEqual(new_id, trust_data['id'])
+ self.assertEqual(self.trustee['id'], trust_data['trustee_user_id'])
+ self.assertEqual(self.trustor['id'], trust_data['trustor_user_id'])
+ self.assertTrue(timeutils.normalize_time(trust_data['expires_at']) >
+ timeutils.utcnow())
+
+ self.assertEqual([{'id': 'member'},
+ {'id': 'other'},
+ {'id': 'browser'}], trust_data['roles'])
+
+ def test_list_trust_by_trustee(self):
+ for i in range(3):
+ self.create_sample_trust(uuid.uuid4().hex)
+ trusts = self.trust_api.list_trusts_for_trustee(self.trustee['id'])
+ self.assertEqual(3, len(trusts))
+ self.assertEqual(trusts[0]["trustee_user_id"], self.trustee['id'])
+ trusts = self.trust_api.list_trusts_for_trustee(self.trustor['id'])
+ self.assertEqual(0, len(trusts))
+
+ def test_list_trust_by_trustor(self):
+ for i in range(3):
+ self.create_sample_trust(uuid.uuid4().hex)
+ trusts = self.trust_api.list_trusts_for_trustor(self.trustor['id'])
+ self.assertEqual(3, len(trusts))
+ self.assertEqual(trusts[0]["trustor_user_id"], self.trustor['id'])
+ trusts = self.trust_api.list_trusts_for_trustor(self.trustee['id'])
+ self.assertEqual(0, len(trusts))
+
+ def test_list_trusts(self):
+ for i in range(3):
+ self.create_sample_trust(uuid.uuid4().hex)
+ trusts = self.trust_api.list_trusts()
+ self.assertEqual(3, len(trusts))
+
+ def test_trust_has_remaining_uses_positive(self):
+ # create a trust with limited uses, check that we have uses left
+ trust_data = self.create_sample_trust(uuid.uuid4().hex,
+ remaining_uses=5)
+ self.assertEqual(5, trust_data['remaining_uses'])
+ # create a trust with unlimited uses, check that we have uses left
+ trust_data = self.create_sample_trust(uuid.uuid4().hex)
+ self.assertIsNone(trust_data['remaining_uses'])
+
+ def test_trust_has_remaining_uses_negative(self):
+ # try to create a trust with no remaining uses, check that it fails
+ self.assertRaises(exception.ValidationError,
+ self.create_sample_trust,
+ uuid.uuid4().hex,
+ remaining_uses=0)
+ # try to create a trust with negative remaining uses,
+ # check that it fails
+ self.assertRaises(exception.ValidationError,
+ self.create_sample_trust,
+ uuid.uuid4().hex,
+ remaining_uses=-12)
+
+ def test_consume_use(self):
+ # consume a trust repeatedly until it has no uses anymore
+ trust_data = self.create_sample_trust(uuid.uuid4().hex,
+ remaining_uses=2)
+ self.trust_api.consume_use(trust_data['id'])
+ t = self.trust_api.get_trust(trust_data['id'])
+ self.assertEqual(1, t['remaining_uses'])
+ self.trust_api.consume_use(trust_data['id'])
+ # This was the last use, the trust isn't available anymore
+ self.assertRaises(exception.TrustNotFound,
+ self.trust_api.get_trust,
+ trust_data['id'])
+
+ def test_duplicate_trusts_not_allowed(self):
+ self.trustor = self.user_foo
+ self.trustee = self.user_two
+ trust_data = {'trustor_user_id': self.trustor['id'],
+ 'trustee_user_id': self.user_two['id'],
+ 'project_id': self.tenant_bar['id'],
+ 'expires_at': timeutils.parse_isotime(
+ '2032-02-18T18:10:00Z'),
+ 'impersonation': True,
+ 'remaining_uses': None}
+ roles = [{"id": "member"},
+ {"id": "other"},
+ {"id": "browser"}]
+ self.trust_api.create_trust(uuid.uuid4().hex, trust_data, roles)
+ self.assertRaises(exception.Conflict,
+ self.trust_api.create_trust,
+ uuid.uuid4().hex,
+ trust_data,
+ roles)
diff --git a/keystone-moon/keystone/tests/unit/utils.py b/keystone-moon/keystone/tests/unit/utils.py
index 17d1de81..e3e49e70 100644
--- a/keystone-moon/keystone/tests/unit/utils.py
+++ b/keystone-moon/keystone/tests/unit/utils.py
@@ -17,13 +17,10 @@ import os
import time
import uuid
-from oslo_log import log
import six
from testtools import testcase
-LOG = log.getLogger(__name__)
-
TZ = None
@@ -72,7 +69,6 @@ def wip(message):
>>> pass
"""
-
def _wip(f):
@six.wraps(f)
def run_test(*args, **kwargs):